suprema-biostar-mcp 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biostar_x_mcp_server/__init__.py +25 -0
- biostar_x_mcp_server/__main__.py +15 -0
- biostar_x_mcp_server/config.py +87 -0
- biostar_x_mcp_server/handlers/__init__.py +35 -0
- biostar_x_mcp_server/handlers/access_handler.py +2162 -0
- biostar_x_mcp_server/handlers/audit_handler.py +489 -0
- biostar_x_mcp_server/handlers/auth_handler.py +216 -0
- biostar_x_mcp_server/handlers/base_handler.py +228 -0
- biostar_x_mcp_server/handlers/card_handler.py +746 -0
- biostar_x_mcp_server/handlers/device_handler.py +4344 -0
- biostar_x_mcp_server/handlers/door_handler.py +3969 -0
- biostar_x_mcp_server/handlers/event_handler.py +1331 -0
- biostar_x_mcp_server/handlers/file_handler.py +212 -0
- biostar_x_mcp_server/handlers/help_web_handler.py +379 -0
- biostar_x_mcp_server/handlers/log_handler.py +1051 -0
- biostar_x_mcp_server/handlers/navigation_handler.py +109 -0
- biostar_x_mcp_server/handlers/occupancy_handler.py +541 -0
- biostar_x_mcp_server/handlers/user_handler.py +3568 -0
- biostar_x_mcp_server/schemas/__init__.py +21 -0
- biostar_x_mcp_server/schemas/access.py +158 -0
- biostar_x_mcp_server/schemas/audit.py +73 -0
- biostar_x_mcp_server/schemas/auth.py +24 -0
- biostar_x_mcp_server/schemas/cards.py +128 -0
- biostar_x_mcp_server/schemas/devices.py +496 -0
- biostar_x_mcp_server/schemas/doors.py +306 -0
- biostar_x_mcp_server/schemas/events.py +104 -0
- biostar_x_mcp_server/schemas/files.py +7 -0
- biostar_x_mcp_server/schemas/help.py +29 -0
- biostar_x_mcp_server/schemas/logs.py +33 -0
- biostar_x_mcp_server/schemas/occupancy.py +19 -0
- biostar_x_mcp_server/schemas/tool_response.py +29 -0
- biostar_x_mcp_server/schemas/users.py +166 -0
- biostar_x_mcp_server/server.py +335 -0
- biostar_x_mcp_server/session.py +221 -0
- biostar_x_mcp_server/tool_manager.py +172 -0
- biostar_x_mcp_server/tools/__init__.py +45 -0
- biostar_x_mcp_server/tools/access.py +510 -0
- biostar_x_mcp_server/tools/audit.py +227 -0
- biostar_x_mcp_server/tools/auth.py +59 -0
- biostar_x_mcp_server/tools/cards.py +269 -0
- biostar_x_mcp_server/tools/categories.py +197 -0
- biostar_x_mcp_server/tools/devices.py +1552 -0
- biostar_x_mcp_server/tools/doors.py +865 -0
- biostar_x_mcp_server/tools/events.py +305 -0
- biostar_x_mcp_server/tools/files.py +28 -0
- biostar_x_mcp_server/tools/help.py +80 -0
- biostar_x_mcp_server/tools/logs.py +123 -0
- biostar_x_mcp_server/tools/navigation.py +89 -0
- biostar_x_mcp_server/tools/occupancy.py +91 -0
- biostar_x_mcp_server/tools/users.py +1113 -0
- biostar_x_mcp_server/utils/__init__.py +31 -0
- biostar_x_mcp_server/utils/category_mapper.py +206 -0
- biostar_x_mcp_server/utils/decorators.py +101 -0
- biostar_x_mcp_server/utils/language_detector.py +51 -0
- biostar_x_mcp_server/utils/search.py +42 -0
- biostar_x_mcp_server/utils/timezone.py +122 -0
- suprema_biostar_mcp-1.0.1.dist-info/METADATA +163 -0
- suprema_biostar_mcp-1.0.1.dist-info/RECORD +61 -0
- suprema_biostar_mcp-1.0.1.dist-info/WHEEL +4 -0
- suprema_biostar_mcp-1.0.1.dist-info/entry_points.txt +2 -0
- suprema_biostar_mcp-1.0.1.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,3568 @@
import logging
import json
from typing import Sequence, Dict, Any, List, Optional, Tuple
from mcp.types import TextContent
import httpx
from pydantic import ValidationError
import os
import platform
import shutil
import asyncio
from pathlib import Path
import csv
import io
import tempfile
import re
import base64

from .base_handler import BaseHandler
from ..schemas import (
    CreateUserInput,
    DeleteUserInput,
    UpdateUserInput,
    AdvancedSearchUserInput,
    ExportCSVInput,
    BulkAddUsersInput,
    ToolResponse,
    BulkEditUsersInput,
)
from ..utils.search import search_users_by_name

logger = logging.getLogger(__name__)

def parse_flexible_date(date_input: Any, is_start: bool = True) -> Optional[str]:
    """
    Convert various date formats to ISO 8601.

    Supported formats:
    - 20251225
    - 2025-12-25
    - 12.25.2025
    - 25.12.2025
    - 2025/12/25
    - 12/25/2025
    - 25/12/2025
    - 2025.12.25

    Args:
        date_input: Date to convert (string or number)
        is_start: True for start-of-day time (00:00:00), False for end-of-day time (23:59:00)

    Returns:
        ISO 8601 date string (e.g., "2025-12-25T00:00:00.00Z")
    """
    from datetime import datetime
    import re

    if not date_input:
        return None

    # Return as-is if already in ISO format
    date_str = str(date_input).strip()
    if re.match(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}', date_str):
        return date_str

    # Extract digits only
    digits_only = re.sub(r'[^\d]', '', date_str)

    year, month, day = None, None, None

    try:
        # YYYYMMDD format (8 digits)
        if len(digits_only) == 8:
            year = int(digits_only[0:4])
            month = int(digits_only[4:6])
            day = int(digits_only[6:8])

        # Dates with separators
        elif '/' in date_str or '.' in date_str or '-' in date_str:
            # Split on the separator
            parts = re.split(r'[/.\-]', date_str)
            parts = [p for p in parts if p]  # drop empty strings

            if len(parts) == 3:
                p1, p2, p3 = int(parts[0]), int(parts[1]), int(parts[2])

                # YYYY-MM-DD or YYYY/MM/DD or YYYY.MM.DD
                if p1 > 1900:
                    year, month, day = p1, p2, p3
                # DD-MM-YYYY or DD/MM/YYYY or DD.MM.YYYY (unambiguous when the day is greater than 12)
                elif p1 > 12:
                    day, month, year = p1, p2, p3
                # MM-DD-YYYY or MM/DD/YYYY or MM.DD.YYYY (unambiguous when the second part is greater than 12)
                elif p2 > 12:
                    month, day, year = p1, p2, p3
                # Ambiguous: default to MM/DD/YYYY
                else:
                    month, day, year = p1, p2, p3

                # Two-digit years are treated as 20XX
                if year < 100:
                    year += 2000

        if year and month and day:
            # Confirm this is a valid calendar date
            dt = datetime(year, month, day)
            time_part = "T00:00:00.00Z" if is_start else "T23:59:00.00Z"
            return f"{year:04d}-{month:02d}-{day:02d}{time_part}"

    except (ValueError, IndexError) as e:
        logger.warning(f"Date parsing failed: {date_input}, error: {e}")

    # Return default value if parsing fails
    return "2001-01-01T00:00:00.00Z" if is_start else "2030-12-31T23:59:00.00Z"


class UserHandler(BaseHandler):
    """Handle user-related operations."""

    # Static helper methods (for internal use)
    @staticmethod
    def _format_pin_from_phone_static(phone: str) -> str:
        """Internal helper: Extract last 8 digits from phone number for PIN."""
        digits_only = ''.join(c for c in phone if c.isdigit())
        if len(digits_only) >= 8:
            return digits_only[-8:]
        else:
            return digits_only.zfill(8)

    @staticmethod
    def _format_phone_number_static(phone: str) -> str:
        """Internal helper: Format phone number (digits only)."""
        return ''.join(c for c in phone if c.isdigit())

    @staticmethod
    def _format_datetime_range_static(date_str: str) -> Tuple[str, str]:
        """Internal helper: Convert date string to BioStar datetime range."""
        date_clean = date_str.strip()
        start_datetime = f"{date_clean}T00:00:00.00Z"
        end_datetime = f"{date_clean}T23:59:00.00Z"
        return start_datetime, end_datetime
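
    # Behavior of the helpers above (illustrative inputs):
    #   _format_pin_from_phone_static("1) 415-732-9042")  -> "57329042"
    #   _format_pin_from_phone_static("555-0102")         -> "05550102"  (zero-padded to 8)
    #   _format_phone_number_static("82) 10-9348-5721")   -> "821093485721"
    #   _format_datetime_range_static("2025-12-25")       -> ("2025-12-25T00:00:00.00Z", "2025-12-25T23:59:00.00Z")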

    # Tool methods (async, for AI to call)
    async def format_pin_from_phone(self, phone: str) -> Sequence[TextContent]:
        """
        Extract last 8 digits from phone number for PIN.
        Removes all non-numeric characters.

        Example:
            "1) 415-732-9042" → "57329042" (last 8 digits)
            "82) 10-9348-5721" → "93485721"
        """
        pin = self._format_pin_from_phone_static(phone)
        return [TextContent(
            type="text",
            text=json.dumps({
                "success": True,
                "input_phone": phone,
                "output_pin": pin,
                "message": f"Extracted PIN: {pin} (last 8 digits of phone)"
            }, indent=2)
        )]

    async def format_phone_number(self, phone: str) -> Sequence[TextContent]:
        """
        Format phone number for BioStar (digits only).
        Removes all non-numeric characters including hyphens, parentheses, spaces.

        Example:
            "1) 415-732-9042" → "14157329042"
            "82) 10-9348-5721" → "821093485721"
        """
        cleaned = self._format_phone_number_static(phone)
        return [TextContent(
            type="text",
            text=json.dumps({
                "success": True,
                "input_phone": phone,
                "output_phone": cleaned,
                "message": f"Cleaned phone number: {cleaned}"
            }, indent=2)
        )]

    async def format_datetime_range(self, date_str: str) -> Sequence[TextContent]:
        """
        Convert date string to BioStar datetime range format.

        Args:
            date_str: Date in "YYYY-MM-DD" format (e.g., "2025-12-25")

        Returns:
            JSON with start_datetime and end_datetime in ISO format

        Example:
            "2025-12-25" → {"start": "2025-12-25T00:00:00.00Z", "end": "2025-12-25T23:59:00.00Z"}
        """
        start, end = self._format_datetime_range_static(date_str)
        return [TextContent(
            type="text",
            text=json.dumps({
                "success": True,
                "input_date": date_str,
                "start_datetime": start,
                "end_datetime": end,
                "message": f"Formatted date range for {date_str}"
            }, indent=2)
        )]

    async def get_users(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Get list of users."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.get(
                    f"{self.session.config.biostar_url}/api/users",
                    headers=headers
                )

                if response.status_code != 200:
                    return self.error_response(f"API call failed: {response.status_code} - {response.text}")

                data = response.json()
                users = data.get("UserCollection", {}).get("rows", [])

                if not users:
                    return self.success_response({
                        "message": "No users found",
                        "users": []
                    })

                return self.success_response({
                    "message": f"Found {len(users)} users",
                    "total": len(users),
                    "users": [{"user_id": u["user_id"], "name": u["name"]} for u in users]
                })

        except Exception as e:
            return await self.handle_api_error(e)

    async def create_user(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Create a new user."""
        try:
            self.check_auth()

            # Validate input
            try:
                input_data = CreateUserInput(**args)
            except ValidationError as e:
                return self.handle_validation_error(e)

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            async with httpx.AsyncClient(verify=False) as client:
                # Get next user ID
                next_id_resp = await client.get(
                    f"{self.session.config.biostar_url}/api/users/next_user_id",
                    headers=headers
                )

                if next_id_resp.status_code != 200:
                    return self.error_response(f"Failed to get user ID: {next_id_resp.status_code}")

                user_id = next_id_resp.json()["User"]["user_id"]

                # Build user data
                user_data = {
                    "User": {
                        "user_id": str(user_id),
                        "user_group_id": {"id": 1},
                        "start_datetime": "2001-01-01T00:00:00.00Z",
                        "expiry_datetime": "2030-12-31T23:59:00.00Z",
                        "name": input_data.name
                    }
                }

                # Add optional fields (passed as-is from AI/user)
                if input_data.email:
                    user_data["User"]["email"] = input_data.email
                if input_data.department:
                    user_data["User"]["department"] = input_data.department
                if input_data.phone:
                    user_data["User"]["phone"] = input_data.phone
                if input_data.user_title:
                    user_data["User"]["user_title"] = input_data.user_title
                if input_data.disabled is not None:
                    user_data["User"]["disabled"] = input_data.disabled
                if input_data.access_groups:
                    user_data["User"]["access_groups"] = input_data.access_groups
                if input_data.pin is not None:
                    user_data["User"]["pin"] = input_data.pin

                # Create user
                create_resp = await client.post(
                    f"{self.session.config.biostar_url}/api/users",
                    headers=headers,
                    json=user_data
                )

                if create_resp.status_code != 200:
                    return self.error_response(
                        f"Failed to create user: {create_resp.status_code}, {create_resp.text}"
                    )

                return self.success_response({
                    "message": f"User {input_data.name} created successfully with ID {user_id}",
                    "user_id": user_id,
                    "name": input_data.name
                })

        except Exception as e:
            return await self.handle_api_error(e)
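
    # Illustrative request body assembled above (all values made up):
    #   POST /api/users
    #   {"User": {"user_id": "42", "user_group_id": {"id": 1},
    #             "start_datetime": "2001-01-01T00:00:00.00Z",
    #             "expiry_datetime": "2030-12-31T23:59:00.00Z",
    #             "name": "Jane Doe", "email": "jane@example.com"}}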

    async def get_user(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Get specific user details."""
        try:
            self.check_auth()

            user_id = args.get("user_id")
            if not user_id:
                return self.error_response("user_id is required")

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.get(
                    f"{self.session.config.biostar_url}/api/users/{user_id}",
                    headers=headers
                )

                if response.status_code != 200:
                    return self.error_response(f"Failed to get user: {response.status_code}")

                user_data = response.json().get("User", {})

                return self.success_response({
                    "user": user_data
                })

        except Exception as e:
            return await self.handle_api_error(e)

    async def update_user(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Update an existing user (preserve current group/period unless explicitly changed)."""
        try:
            self.check_auth()

            # ① Validate
            try:
                input_data = UpdateUserInput(**args)
            except ValidationError as e:
                return self.handle_validation_error(e)

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            provided = getattr(input_data, "model_fields_set", None) or getattr(input_data, "__fields_set__", set())
            def is_provided(field: str) -> bool:
                return field in provided
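
            # A minimal sketch (not from this file) of why the fields-set check matters:
            # with Pydantic v2, passing a field explicitly (even as None) records it in
            # model_fields_set, while omitting it does not, e.g.:
            #
            #   class M(BaseModel):
            #       email: Optional[str] = None
            #   M(email=None).model_fields_set  # {'email'}  -> "clear the email"
            #   M().model_fields_set            # set()      -> "preserve the email"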

            user_id = input_data.user_id
            if not user_id and input_data.name:
                users = await search_users_by_name(self.get_session_id(), input_data.name)
                if not users:
                    return self.error_response(f"No user found with name '{input_data.name}'")
                if len(users) > 1:
                    return self.error_response(f"Multiple users found with name '{input_data.name}'. Please specify user_id.")
                user_id = users[0]["user_id"]
            if not user_id:
                return self.error_response("Either user_id or name is required")
            user_id = str(user_id)

            async with httpx.AsyncClient(verify=False) as client:
                cur_resp = await client.get(f"{self.session.config.biostar_url}/api/users/{user_id}", headers=headers)
                if cur_resp.status_code != 200:
                    return self.error_response(f"Failed to get user: {cur_resp.status_code} - {cur_resp.text}")

                cur_user = (cur_resp.json() or {}).get("User", {}) or {}

                def _to_iso(val: Any, fallback: str) -> str:
                    if isinstance(val, str) and val:
                        return val
                    if isinstance(val, (int, float)) and val > 0:
                        from datetime import datetime, timezone
                        try:
                            return datetime.fromtimestamp(val/1000.0, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.000Z")
                        except Exception:
                            pass
                    return fallback
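
                # For example (illustrative): an epoch-milliseconds value such as
                # 1735084800000 becomes "2024-12-25T00:00:00.000Z", while an already
                # formatted string passes through unchanged.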

                cur_gid = None
                ug = cur_user.get("user_group_id")
                if isinstance(ug, dict) and ug.get("id") is not None:
                    try:
                        cur_gid = int(str(ug["id"]))
                    except Exception:
                        cur_gid = None

                if is_provided("user_group_id"):
                    try:
                        gid = int(input_data.user_group_id) if input_data.user_group_id is not None else None
                    except Exception:
                        return self.error_response("Invalid 'user_group_id'. Must be numeric.")
                    if gid is None:
                        return self.error_response("'user_group_id' cannot be null when provided.")
                else:
                    if cur_gid is None:
                        return self.error_response("Current user_group_id missing on server; cannot preserve safely.")
                    gid = cur_gid

                # Date handling: support start_date/end_date aliases and parse flexible formats
                start_val = None
                expiry_val = None

                # Handle start_datetime or start_date
                if is_provided("start_datetime") and input_data.start_datetime:
                    start_val = parse_flexible_date(input_data.start_datetime, is_start=True)
                elif is_provided("start_date") and input_data.start_date:
                    start_val = parse_flexible_date(input_data.start_date, is_start=True)
                else:
                    start_val = _to_iso(cur_user.get("start_datetime"), "2001-01-01T00:00:00.00Z")

                # Handle expiry_datetime or end_date
                if is_provided("expiry_datetime") and input_data.expiry_datetime:
                    expiry_val = parse_flexible_date(input_data.expiry_datetime, is_start=False)
                elif is_provided("end_date") and input_data.end_date:
                    expiry_val = parse_flexible_date(input_data.end_date, is_start=False)
                else:
                    expiry_val = _to_iso(cur_user.get("expiry_datetime"), "2030-12-31T23:59:00.00Z")

                user_body: Dict[str, Any] = {
                    "name": (input_data.new_name or cur_user.get("name") or input_data.name or ""),
                    "user_group_id": {"id": gid},
                    "start_datetime": start_val,
                    "expiry_datetime": expiry_val,
                }

                if is_provided("email"):
                    user_body["email"] = input_data.email
                if is_provided("department"):
                    user_body["department"] = input_data.department
                if is_provided("phone"):
                    user_body["phone"] = input_data.phone
                if is_provided("user_title"):
                    user_body["user_title"] = input_data.user_title
                if is_provided("disabled"):
                    user_body["disabled"] = input_data.disabled
                if is_provided("pin"):
                    user_body["pin"] = input_data.pin

                # Access groups: may be specified by ID or by name
                if is_provided("access_groups") and input_data.access_groups is not None:
                    user_body["access_groups"] = [{"id": str(g)} for g in input_data.access_groups]
                elif is_provided("access_group_names") and input_data.access_group_names is not None:
                    # Resolve access group names to IDs
                    async with httpx.AsyncClient(verify=False, timeout=30) as client:
                        ag_resp = await client.post(
                            f"{self.session.config.biostar_url}/api/v2/access_groups",
                            headers=headers,
                            json={"limit": 0, "order_by": "id:false"}
                        )

                        if ag_resp.status_code == 200:
                            ag_data = ag_resp.json()
                            ag_rows = ((ag_data.get("AccessGroupCollection") or {}).get("rows") or [])
                            ag_name_to_obj = {}
                            for ag in ag_rows:
                                name = str(ag.get("name") or "").strip()
                                ag_id = ag.get("id")
                                if name and ag_id is not None:
                                    ag_name_to_obj[name.lower()] = {"id": ag_id, "name": name}

                            # Look up IDs by name
                            access_groups = []
                            not_found = []
                            for ag_name in input_data.access_group_names:
                                ag_name_lower = ag_name.strip().lower()
                                if ag_name_lower in ag_name_to_obj:
                                    ag_obj = ag_name_to_obj[ag_name_lower]
                                    access_groups.append({"id": str(ag_obj["id"]), "name": ag_obj["name"]})
                                else:
                                    not_found.append(ag_name)

                            if not_found:
                                return self.error_response(
                                    f"Access groups not found: {', '.join(not_found)}",
                                    {"available_groups": [obj["name"] for obj in ag_name_to_obj.values()]}
                                )

                            user_body["access_groups"] = access_groups
                        else:
                            return self.error_response(f"Failed to fetch access groups: {ag_resp.status_code}")

                payload = {"User": user_body}

                async with httpx.AsyncClient(verify=False) as client:
                    response = await client.put(
                        f"{self.session.config.biostar_url}/api/users/{user_id}",
                        headers=headers,
                        json=payload
                    )

                if response.status_code == 200:
                    # Verify the update by fetching the user again
                    async with httpx.AsyncClient(verify=False) as client:
                        verify_resp = await client.get(
                            f"{self.session.config.biostar_url}/api/users/{user_id}",
                            headers=headers
                        )

                    updated_user = {}
                    if verify_resp.status_code == 200:
                        updated_user = (verify_resp.json() or {}).get("User", {}) or {}

                    return self.success_response({
                        "message": f"User {user_id} updated successfully and verified",
                        "user_id": user_id,
                        "before": {
                            "name": cur_user.get("name"),
                            "user_group": cur_user.get("user_group_id", {}).get("name") if isinstance(cur_user.get("user_group_id"), dict) else None,
                            "email": cur_user.get("email"),
                            "department": cur_user.get("department"),
                            "phone": cur_user.get("phone"),
                        },
                        "after": {
                            "name": updated_user.get("name"),
                            "user_group": updated_user.get("user_group_id", {}).get("name") if isinstance(updated_user.get("user_group_id"), dict) else None,
                            "email": updated_user.get("email"),
                            "department": updated_user.get("department"),
                            "phone": updated_user.get("phone"),
                        },
                        "request_body": payload
                    })
                else:
                    return self.error_response(
                        f"Failed to update user: {response.status_code} - {response.text}",
                        {"request_body": payload}
                    )

        except Exception as e:
            return await self.handle_api_error(e)
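
    # Illustrative call (field names from UpdateUserInput; values made up):
    #   update_user({"user_id": "42", "start_date": "2025/01/01", "department": "R&D"})
    # changes the start date and department while the group and expiry period are
    # preserved from the current server-side record.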

    async def delete_user(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Delete one or more users."""
        try:
            self.check_auth()

            # Validate input
            try:
                input_data = DeleteUserInput(**args)
            except ValidationError as e:
                return self.handle_validation_error(e)

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            deleted_users = []
            failed_users = []

            async with httpx.AsyncClient(verify=False) as client:
                if not input_data.id and input_data.name:
                    # Search by name
                    users = await search_users_by_name(self.get_session_id(), input_data.name)
                    if not users:
                        return self.error_response(f"No users found with name '{input_data.name}'")

                    # CRITICAL SAFETY CHECK: If multiple users matched, require explicit confirmation
                    if len(users) > 1 and not input_data.confirm_multiple:
                        return self.error_response(
                            f"SAFETY CHECK: Found {len(users)} users matching '{input_data.name}'. "
                            f"To delete multiple users, please confirm by setting 'confirm_multiple=true'.",
                            {
                                "matched_users": users,
                                "count": len(users),
                                "requires_confirmation": True,
                                "instruction": "Add 'confirm_multiple=true' to your request to proceed with deletion."
                            }
                        )

                    user_ids = [user["user_id"] for user in users]
                    user_names = {user["user_id"]: user["name"] for user in users}

                    delete_param = "+".join(str(uid) for uid in user_ids)
                    response = await client.delete(
                        f"{self.session.config.biostar_url}/api/users",
                        headers=headers,
                        params={"id": delete_param, "group_id": "1"}
                    )

                    if response.status_code == 200:
                        deleted_users = [{"user_id": uid, "name": user_names[uid]} for uid in user_ids]
                    else:
                        failed_users = [
                            {"user_id": uid, "name": user_names[uid], "reason": response.text}
                            for uid in user_ids
                        ]

                elif input_data.id:
                    # Delete by ID
                    id_list = input_data.id if isinstance(input_data.id, list) else [input_data.id]
                    delete_param = "+".join(str(uid) for uid in id_list)

                    response = await client.delete(
                        f"{self.session.config.biostar_url}/api/users",
                        headers=headers,
                        params={"id": delete_param, "group_id": "1"}
                    )

                    if response.status_code == 200:
                        deleted_users = [{"user_id": uid} for uid in id_list]
                    else:
                        failed_users = [{"user_id": uid, "reason": response.text} for uid in id_list]

                else:
                    return self.error_response("Either id or name must be provided")

            if deleted_users:
                return self.success_response({
                    "message": f"Successfully deleted {len(deleted_users)} user(s)",
                    "deleted_users": deleted_users,
                    "failed_users": failed_users
                })
            else:
                return self.error_response("Failed to delete users", {"failed_users": failed_users})

        except Exception as e:
            return await self.handle_api_error(e)
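
    # The delete endpoint takes a '+'-joined id list in the query string, so deleting
    # users 3, 7, and 12 sends (illustrative ids):
    #   params={"id": "3+7+12", "group_id": "1"}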

    async def search_users(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search for users using search_text or specific criteria."""
        try:
            self.check_auth()

            search_text = args.get("search_text")
            limit = args.get("limit", 50)
            user_group_id = args.get("user_group_id")
            order_by = args.get("order_by", "user_id:false")

            if not search_text:
                return self.error_response("search_text is required")

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            # Build payload for search API (different from advance_search)
            payload = {
                "limit": limit,
                "search_text": search_text
            }

            if user_group_id:
                payload["user_group_id"] = user_group_id

            if order_by:
                payload["order_by"] = order_by

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/v2/users/search",
                    headers=headers,
                    json=payload
                )

                if response.status_code != 200:
                    return self.error_response(f"Search failed: {response.status_code} - {response.text}")

                data = response.json()
                users = data.get("UserCollection", {}).get("rows", [])

                if not users:
                    return self.success_response({
                        "message": f"No users found matching '{search_text}'",
                        "users": [],
                        "total": 0,
                        "search_text": search_text
                    })

                # Extract key user information
                user_list = []
                for user in users:
                    user_info = {
                        "user_id": user.get("user_id"),
                        "name": user.get("name"),
                        "email": user.get("email"),
                        "department": user.get("department"),
                        "user_title": user.get("user_title"),
                        "phone": user.get("phone"),
                        "disabled": user.get("disabled"),
                        "user_group_id": user.get("user_group_id"),
                        "access_groups": user.get("access_groups"),
                        "start_datetime": user.get("start_datetime"),
                        "expiry_datetime": user.get("expiry_datetime"),
                        "operator_level": user.get("operator_level"),
                        "fingerprint_template_count": user.get("fingerprint_template_count"),
                        "face_count": user.get("face_count"),
                        "card_count": user.get("card_count"),
                        "have_pin": user.get("have_pin"),
                        "qr_count": user.get("qr_count"),
                        "mobile_count": user.get("mobile_count")
                    }
                    user_list.append(user_info)

                return self.success_response({
                    "message": f"Found {len(users)} user(s) matching '{search_text}'",
                    "users": user_list,
                    "total": len(users),
                    "search_text": search_text,
                    "search_criteria": payload
                })

        except Exception as e:
            return await self.handle_api_error(e)
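
    # Example body sent to /api/v2/users/search by the method above (values made up):
    #   {"limit": 50, "search_text": "kim", "order_by": "user_id:false"}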

    async def advanced_search_users(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Pass-through advanced search to BioStar 2.
        - Do not add SQL wildcards or normalize fields.
        - Let the API perform partial matching.
        - Enforce operator level and offset types.
        Defaults:
        - user_operator_level_id = "0"
        - limit = 50
        - offset = "0" (string per API spec)
        """
        try:
            self.check_auth()

            raw_args: Dict[str, Any] = dict(args)

            # Coerce offset int->str per API spec (offset must be string)
            if "offset" in raw_args and isinstance(raw_args["offset"], int):
                raw_args["offset"] = str(raw_args["offset"])

            # Validate with Pydantic schema
            try:
                input_data = AdvancedSearchUserInput(**raw_args)
            except ValidationError as e:
                return self.handle_validation_error(e)

            # Build payload excluding None
            payload: Dict[str, Any] = {k: v for k, v in input_data.model_dump().items() if v is not None}

            # Force critical defaults (caller value takes precedence if provided)
            payload["user_operator_level_id"] = str(args.get("user_operator_level_id", "0"))
            payload["limit"] = int(args.get("limit", payload.get("limit", 50)))
            payload["offset"] = str(args.get("offset", payload.get("offset", "0")))

            # Do not inject order_by unless explicitly provided by caller
            if "order_by" not in args:
                payload.pop("order_by", None)

            # Align to API spec: if lists slip in, convert to comma-delimited strings
            if "user_group_ids" in payload and isinstance(payload["user_group_ids"], list):
                payload["user_group_ids"] = ",".join(str(x) for x in payload["user_group_ids"])
            if "user_access_group_ids" in payload and isinstance(payload["user_access_group_ids"], list):
                payload["user_access_group_ids"] = ",".join(str(x) for x in payload["user_access_group_ids"])

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json",
            }

            endpoint = f"{self.session.config.biostar_url}/api/v2/users/advance_search"

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(endpoint, headers=headers, json=payload)

                if response.status_code != 200:
                    return self.error_response(f"Advanced search failed: {response.status_code} - {response.text}")
                data = response.json()
                users = data.get("UserCollection", {}).get("rows", [])
                if not users:
                    return self.success_response({
                        "message": "No users found matching the search criteria",
                        "users": [],
                        "total": 0,
                        "search_criteria": payload
                    })
                user_list = []
                for user in users:
                    user_info = {
                        "user_id": user.get("user_id"),
                        "name": user.get("name"),
                        "email": user.get("email"),
                        "department": user.get("department"),
                        "user_title": user.get("user_title"),
                        "phone": user.get("phone"),
                        "disabled": user.get("disabled"),
                        "user_group_id": user.get("user_group_id"),
                        "access_groups": user.get("access_groups"),
                        "start_datetime": user.get("start_datetime"),
                        "expiry_datetime": user.get("expiry_datetime"),
                        "operator_level": user.get("operator_level"),
                        "fingerprint_template_count": user.get("fingerprint_template_count"),
                        "face_count": user.get("face_count"),
                        "card_count": user.get("card_count"),
                        "have_pin": user.get("have_pin"),
                        "qr_count": user.get("qr_count"),
                        "mobile_count": user.get("mobile_count")
                    }
                    user_list.append(user_info)

                return self.success_response({
                    "message": f"Found {len(users)} user(s) matching the search criteria",
                    "users": user_list,
                    "total": len(users),
                    "search_criteria": payload
                })

        except Exception as e:
            return await self.handle_api_error(e)
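
    # Coercions applied above before calling advance_search (illustrative):
    #   {"offset": 0}               -> {"offset": "0"}       (API expects a string)
    #   {"user_group_ids": [1, 2]}  -> {"user_group_ids": "1,2"}
    # and user_operator_level_id falls back to "0" when the caller omits it.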

    async def _resolve_user_group_by_name(
        self,
        headers: Dict[str, str],
        query: str,
    ) -> Dict[str, Any]:
        """
        Resolve a single user group by substring name matching.

        Rules:
        - 0 matches -> {"status":"no_match", "needs_selection":True, ...} with the full list
        - 1 match -> {"status":"ok", "id":..., "name":...}
        - >=2 matches -> {"status":"ambiguous", "needs_selection":True, "candidates":[...]}
          (Never auto-pick even if an exact match is included.)
        """
        rows = await self._list_user_groups_rows(headers)
        flat = self._flatten_groups_basic(rows)

        def _norm(s: Any) -> str:
            # normalize: trim, collapse spaces, lowercase
            return " ".join(str(s or "").strip().lower().split())

        q = _norm(query)
        # substring match (LIKE %query%)
        matches = [g for g in flat if q and q in _norm(g.get("name"))]

        if len(matches) == 0:
            return {
                "status": "no_match",
                "needs_selection": True,
                "message": f"No user group matched '{query}'. Please pick one below.",
                "prompt": "Which user group?",
                "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat],
            }

        if len(matches) > 1:
            return {
                "status": "ambiguous",
                "needs_selection": True,
                "message": f"Multiple user groups matched '{query}'. Which one?",
                "prompt": "Which user group?",
                "candidates": [{"id": g["id"], "name": g["name"]} for g in matches],
            }

        return {"status": "ok", "id": int(matches[0]["id"]), "name": matches[0]["name"]}

    async def _resolve_user_groups_by_names(self, headers: Dict[str, str], names: List[str]) -> Dict[str, Any]:
        """
        PRIVATE: Resolve multiple group names at once.
        Returns:
        - {"resolved":[{"query":<str>,"id":<int>,"name":<str>},...]}  # all good
        - {"needs_selection":True,"issues":[{ per-name selection payloads ... }]}
        """
        issues: List[Dict[str, Any]] = []
        resolved: List[Dict[str, Any]] = []
        for q in names:
            r = await self._resolve_user_group_by_name(headers, q)
            if r.get("status") == "ok":
                resolved.append({"query": q, "id": r["id"], "name": r.get("name")})
            else:
                item = {"query": q}
                item.update({k: v for k, v in r.items() if k != "status"})
                issues.append(item)
        if issues:
            return {"needs_selection": True, "issues": issues}
        return {"resolved": resolved}

    async def bulk_add_users(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Add multiple users at once.

        Supports:
        - user group assignment by id or name (0/1/many name resolution)
        - multiple groups with round-robin or explicit counts
        - naming controls: suffix width, start number, separator, or a free template

        Disambiguation hard-stop:
        - If name-based resolution is ambiguous or not found, return error with
          candidates and a selection_token. The next call MUST include:
            * selection_token (from the error response)
            * user_group_id chosen by the user
        - If a pending selection_token exists, passing user_group_id without
          the token is rejected to prevent the agent from auto-picking.

        Optional args:
        - user_group_id: int|str
        - user_group_name: str
        - user_group_names: List[str]
        - group_counts: List[int] (with user_group_names + distribution='by_counts')
        - distribution: 'round_robin' | 'by_counts' (default: round_robin)
        - group_allocations: List[{'group_id' | 'group_name', 'count'}], counts must sum to 'count'
        - suffix_width: int (default 3)
        - suffix_start: int (default 1)
        - name_separator: str (default "")
        - name_template: str (format string using {base}, {seq}, {user_id}; e.g., "홍길동 {seq:03d}")
        - selection_token: str (required ONLY when previous call returned ambiguity)
        """
        try:
            from uuid import uuid4

            self.check_auth()

            # ---------- small helpers for selection-token gating ----------
            def _get_state() -> Dict[str, Any]:
                st = getattr(self.session, "state", None)
                if st is None:
                    self.session.state = {}
                    st = self.session.state
                return st

            def _set_pending_token(candidates: List[Dict[str, Any]]) -> str:
                token = uuid4().hex
                ids = {int(c["id"]) for c in candidates if isinstance(c, dict) and c.get("id") is not None}
                st = _get_state()
                st["bulk_add_pending"] = {"token": token, "ids": ids}
                return token

            def _has_pending_token() -> bool:
                st = _get_state()
                return bool(st.get("bulk_add_pending"))

            def _consume_token_if_valid(token: str, selected_id: int) -> bool:
                st = _get_state()
                pending = st.get("bulk_add_pending")
                if not pending:
                    return False
                if pending.get("token") != token:
                    return False
                ids: set = pending.get("ids") or set()
                if int(selected_id) not in ids:
                    return False
                # consume
                st.pop("bulk_add_pending", None)
                return True
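
            # Token flow sketch: a first call with user_group_name="Office" that matches
            # several groups returns AMBIGUOUS_USER_GROUP with candidates and a
            # selection_token; the follow-up call must resend the request with the chosen
            # user_group_id plus that selection_token, or it fails with
            # SELECTION_TOKEN_REQUIRED. "Office" is a hypothetical name.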

            # --- Extract grouping params before Pydantic validation (to avoid extra-field errors) ---
            group_id_arg = args.get("user_group_id")
            group_name_arg = args.get("user_group_name") or args.get("group_name")
            group_names_arg = args.get("user_group_names") or args.get("group_names")  # List[str]
            group_counts_arg = args.get("group_counts")  # List[int], optional
            group_allocations_arg = args.get("group_allocations")  # [{"group_id" or "group_name", "count"}...]
            distribution_mode = (args.get("distribution") or "round_robin").strip().lower()  # "round_robin" | "by_counts"
            selection_token_in = (args.get("selection_token") or "").strip()

            # --- Validate base_name/count using Pydantic as-is ---
            try:
                input_data = BulkAddUsersInput(**{k: v for k, v in args.items() if k in {"base_name", "count"}})
            except ValidationError as e:
                return self.handle_validation_error(e)

            if input_data.count > 30:
                return self.error_response(f"Maximum 30 users can be created at once. Requested: {input_data.count}")

            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            async with httpx.AsyncClient(verify=False) as client:
                # Get starting user ID from server
                resp = await client.get(f"{self.session.config.biostar_url}/api/users/next_user_id", headers=headers)
                if resp.status_code != 200:
                    return self.error_response(f"Failed to get user ID: {resp.status_code} - {resp.text}")
                start_id = int(resp.json()["User"]["user_id"])

                # --- Resolve target group ids for each user based on provided args ---
                group_ids_for_each: List[int] = []

                # Case A: explicit allocation objects [{"group_id":..|"group_name":.., "count": N}, ...]
                if isinstance(group_allocations_arg, list) and group_allocations_arg:
                    total = 0
                    resolved_allocs: List[Tuple[int, int, str]] = []  # (gid, count, display_name)
                    for alloc in group_allocations_arg:
                        if not isinstance(alloc, dict) or "count" not in alloc:
                            return self.error_response(
                                "Each item in 'group_allocations' must be an object containing 'count' and either 'group_id' or 'group_name'."
                            )
                        try:
                            c = int(alloc["count"])
                        except Exception:
                            return self.error_response("Each 'count' in 'group_allocations' must be an integer.")
                        if c < 0:
                            return self.error_response("Counts in 'group_allocations' must be >= 0.")
                        total += c

                        if "group_id" in alloc and alloc["group_id"] is not None:
                            try:
                                gid = int(str(alloc["group_id"]).strip())
                            except Exception:
                                return self.error_response("Invalid 'group_id' in 'group_allocations'.")
                            # If a pending token exists, enforce passing token for safety
                            if _has_pending_token():
                                if not selection_token_in or not _consume_token_if_valid(selection_token_in, gid):
                                    return self.error_response(
                                        "Ambiguity previously detected. Provide a valid selection_token with a candidate group_id.",
                                        {"error_code": "SELECTION_TOKEN_REQUIRED", "needs_selection": True}
                                    )
                            resolved_allocs.append((gid, c, f"id={gid}"))
                        elif "group_name" in alloc and alloc["group_name"]:
                            res = await self._resolve_user_group_by_name(headers, str(alloc["group_name"]).strip())
                            if res.get("status") != "ok":
                                # Create a selection token and hard-stop
                                candidates = res.get("candidates") or res.get("user_groups") or []
                                token = _set_pending_token(candidates)
                                detail = {
                                    "error_code": "AMBIGUOUS_USER_GROUP" if res.get("status") == "ambiguous" else "USER_GROUP_NOT_FOUND",
                                    "needs_selection": True,
                                    "prompt": res.get("prompt", "Which user group?"),
                                    "candidates": [{"id": x.get("id"), "name": x.get("name")} for x in candidates],
                                    "selection_token": token,
                                }
                                return self.error_response("User group resolution failed for 'group_allocations'.", detail)
                            gid = int(res["id"])
                            resolved_allocs.append((gid, c, res.get("name") or f"id={gid}"))
                        else:
                            return self.error_response("Each 'group_allocations' item must provide 'group_id' or 'group_name'.")

                    if total != input_data.count:
                        return self.error_response(
                            f"Sum of 'group_allocations.count' must equal requested 'count' ({input_data.count}). Got {total}."
                        )

                    # Expand into per-user assignment
                    for (gid, c, _label) in resolved_allocs:
                        group_ids_for_each.extend([gid] * c)

                # Case B: single group by id/name for all
                elif group_id_arg is not None or group_name_arg:
                    if group_id_arg is not None:
                        try:
                            single_gid = int(str(group_id_arg).strip())
                        except Exception:
                            return self.error_response("Invalid 'user_group_id'. It must be a number.")
                        # If there is a pending ambiguity token, enforce token validation
                        if _has_pending_token():
                            if not selection_token_in or not _consume_token_if_valid(selection_token_in, single_gid):
                                return self.error_response(
                                    "Ambiguity previously detected. Provide a valid selection_token with the chosen user_group_id.",
                                    {"error_code": "SELECTION_TOKEN_REQUIRED", "needs_selection": True}
                                )
                        group_ids_for_each = [single_gid] * input_data.count
                    else:
                        # Name-based resolution -> HARD STOP if ambiguous or not found
                        res = await self._resolve_user_group_by_name(headers, str(group_name_arg).strip())
                        if res.get("status") != "ok":
                            candidates = res.get("candidates") or res.get("user_groups") or []
                            token = _set_pending_token(candidates)
                            detail = {
                                "error_code": "AMBIGUOUS_USER_GROUP" if res.get("status") == "ambiguous" else "USER_GROUP_NOT_FOUND",
                                "needs_selection": True,
                                "prompt": res.get("prompt", "Which user group?"),
                                "candidates": [{"id": x.get("id"), "name": x.get("name")} for x in candidates],
                                "selection_token": token,
                            }
                            return self.error_response("Ambiguous or missing user group. Please choose explicitly.", detail)
                        single_gid = int(res["id"])
                        group_ids_for_each = [single_gid] * input_data.count

                # Case C: multiple group names with distribution
                elif isinstance(group_names_arg, list) and group_names_arg:
                    multi = await self._resolve_user_groups_by_names(headers, [str(x).strip() for x in group_names_arg])
                    if multi.get("needs_selection"):
                        # Any unresolved name -> create token and hard-stop
                        # We gather all candidates to a single token for simplicity.
                        issues = multi.get("issues", [])
                        all_candidates: List[Dict[str, Any]] = []
                        for it in issues:
                            cands = (it.get("candidates") or it.get("user_groups") or [])
                            for c in cands:
                                if isinstance(c, dict) and "id" in c:
                                    all_candidates.append({"id": c["id"], "name": c.get("name")})
                        token = _set_pending_token(all_candidates)
                        return self.error_response(
                            "One or more user group names require disambiguation.",
                            {
                                "error_code": "AMBIGUOUS_OR_NOT_FOUND",
                                "needs_selection": True,
                                "issues": issues,
                                "selection_token": token,
                                "prompt": "Resolve each group name to a specific id and call again."
                            }
                        )
                    resolved = multi.get("resolved", [])
                    gids = [int(x["id"]) for x in resolved]
                    if not gids:
                        return self.error_response("No valid user groups resolved from 'user_group_names'.")

                    if distribution_mode == "by_counts":
                        if not isinstance(group_counts_arg, list) or len(group_counts_arg) != len(gids):
                            return self.error_response(
                                "'group_counts' must be provided and match the length of 'user_group_names' when distribution='by_counts'."
                            )
                        try:
                            counts = [int(c) for c in group_counts_arg]
                        except Exception:
                            return self.error_response("All 'group_counts' must be integers.")
                        if any(c < 0 for c in counts):
                            return self.error_response("All 'group_counts' must be >= 0.")
                        if sum(counts) != input_data.count:
                            return self.error_response(
                                f"Sum of 'group_counts' must equal requested 'count' ({input_data.count})."
                            )
                        # expand by counts order
                        for gid, c in zip(gids, counts):
                            group_ids_for_each.extend([gid] * c)
                    else:
                        # default round-robin
                        for i in range(input_data.count):
                            group_ids_for_each.append(gids[i % len(gids)])

                # Case D: nothing provided → default to root (id=1)
                else:
                    group_ids_for_each = [1] * input_data.count

                # --- Naming options ---
                suffix_width = int(args.get("suffix_width", 3))
                suffix_start = int(args.get("suffix_start", 1))
                name_separator = args.get("name_separator", "")
                name_template = (args.get("name_template") or "").strip() or None
                base = input_data.base_name

                # --- Build user rows with computed group assignments ---
                rows: List[Dict[str, Any]] = []
                for i in range(input_data.count):
                    # seq is NOT the BioStar user_id; it's our logical counter (1..N)
                    seq = suffix_start + i

                    if name_template:
                        # Supports {base}, {seq}, {user_id} and formatting like {seq:03d}
                        try:
                            name = name_template.format(base=base, seq=seq, user_id=start_id + i)
                        except Exception:
                            # Fallback if template formatting fails
                            name = f"{base}{name_separator}{str(seq).zfill(suffix_width)}"
                    else:
                        name = f"{base}{name_separator}{str(seq).zfill(suffix_width)}"

                    gid = int(group_ids_for_each[i])
                    rows.append({
                        "user_id": start_id + i,
                        "name": name,
                        "user_group_id": {"id": gid},
                        "start_datetime": "2001-01-01T00:00:00.00Z",
                        "expiry_datetime": "2030-12-31T23:59:00.00Z"
                    })

                payload = {"UserCollection": {"rows": rows}}

                async with httpx.AsyncClient(verify=False) as client:
                    post_resp = await client.post(
                        f"{self.session.config.biostar_url}/api/users",
                        json=payload,
                        headers=headers
                    )

                if post_resp.status_code == 200:
                    created_users = [{
                        "user_id": row["user_id"],
                        "name": row["name"],
                        "group_id": row["user_group_id"]["id"]
                    } for row in rows]

                    # summarize group distribution
                    dist: Dict[int, int] = {}
                    for gid in group_ids_for_each:
                        dist[gid] = dist.get(gid, 0) + 1

                    return self.success_response({
                        "message": f"Successfully created {len(rows)} users",
                        "users": created_users,
                        "group_distribution": [{"group_id": gid, "count": cnt} for gid, cnt in sorted(dist.items())]
                    })
                else:
                    return self.error_response(
                        f"Failed to create users: {post_resp.status_code} - {post_resp.text}"
                    )

        except Exception as e:
            return await self.handle_api_error(e)
|
|
1207
|
+
|
|
1208
|
+
    async def export_users_csv(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        r"""
        Export users to CSV via /api/users/csv_export and copy the file to the Windows user's Downloads folder.

        - Source (server) path:
            C:\Program Files\BioStar X\nginx\html\download\<filename> (or self.session.config.download_dir)
        - Destination (Windows):
            • If 'dest_dir' is provided -> use it as an absolute path
            • elif 'target_username' -> C:\Users\<target_username>\Downloads
            • else -> %USERPROFILE%\Downloads

        Behavior:
        - Only attempts the file copy on Windows.
        - Always returns the server source path (even if the copy is skipped/failed).
        - Filters args before Pydantic validation to avoid 'extra fields' errors.
        """
        try:
            self.check_auth()

            # ---------- copy options (not validated by ExportCSVInput) ----------
            copy_to_downloads: bool = bool(args.get("copy_to_downloads", True))
            dest_dir_arg: str = (args.get("dest_dir") or "").strip()
            target_username: str = (args.get("target_username") or "").strip()

            # ---------- advanced criteria (new: for server-side filtering) ----------
            adv_criteria_arg: dict = args.get("adv_criteria") or {}

            # ---------- validate inputs (allow only schema fields) ----------
            schema_keys = {"ids", "search_text", "limit"}
            filtered_args = {k: v for k, v in args.items() if k in schema_keys}

            try:
                input_data = ExportCSVInput(**filtered_args)
            except ValidationError as e:
                return self.handle_validation_error(e)

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            # ---------- resolve user ids or use advanced criteria ----------
            user_ids = input_data.ids
            id_param = None
            export_count_desc = "matching"
            use_adv_criteria = False

            # Priority 1: Use advanced criteria (server-side filtering, no limit)
            if adv_criteria_arg and not user_ids:
                use_adv_criteria = True
                id_param = "*"  # Export all users matching criteria
                export_count_desc = "all matching"
                logger.info(f"Using advanced criteria for CSV export: {adv_criteria_arg}")

            # Priority 2: Resolve IDs from search_text
            elif input_data.search_text and not user_ids:
                # Search first to resolve IDs
                search_args = {
                    "search_text": input_data.search_text,
                    "limit": input_data.limit or 50
                }
                search_result = await self.search_users(search_args)
                if not search_result:
                    return self.error_response("No users found matching the search criteria")

                try:
                    search_content = search_result[0].text
                    search_data = json.loads(search_content)
                    users = search_data.get("users", []) or []
                    user_ids = [str(u.get("user_id")) for u in users if u.get("user_id")]
                    if not user_ids:
                        return self.error_response("No user IDs found in search results")
                except (json.JSONDecodeError, KeyError) as e:
                    logger.error(f"Failed to parse search results: {e}")
                    return self.error_response("Failed to parse search results")

            # Priority 3: Use provided IDs or the wildcard
            if not use_adv_criteria:
                if not user_ids:
                    return self.error_response("At least one user ID, search_text, or adv_criteria must be provided")

                if user_ids == ["*"]:
                    id_param = "*"
                    export_count_desc = "all"
                else:
                    id_param = "+".join(str(uid) for uid in user_ids)
                    export_count_desc = str(len(user_ids))

            # Build payload with advanced criteria
            payload = {
                "Query": {
                    "offset": 0,
                    "group_by": 1,
                    "columns": [
                        "user_id", "name", "email", "user_group_id", "access_groups",
                        "start_datetime", "expiry_datetime", "operator_level",
                        "fingerprint_template_count", "face_count", "visual_face_count",
                        "card_count", "have_pin", "qr_count", "mobile_count", "disabled"
                    ],
                    "headers": [
                        "ID", "Name", "Email", "Group", "Access Group", "Start datetime",
                        "End datetime", "Operator Level", "Fingerprint", "Face",
                        "Visual Face", "Card", "PIN", "QR/Barcode", "Mobile Access Card", "Status"
                    ]
                },
                "adv_criteria": adv_criteria_arg if use_adv_criteria else {
                    "user_group_ids": [],
                    "user_group_id": "1",
                    "user_operator_level_id": "0"
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                resp = await client.post(
                    f"{self.session.config.biostar_url}/api/users/csv_export?id={id_param}",
                    headers=headers,
                    json=payload
                )

            if resp.status_code != 200:
                return self.error_response(f"Failed to export CSV: {resp.status_code} - {resp.text}")

            data = resp.json() or {}
            filename = (data.get("File") or {}).get("uri")
            if not filename:
                return self.error_response("Failed to create CSV file - no filename returned")

            # ---------- compute server source path ----------
            download_root = getattr(
                self.session.config,
                "download_dir",
                r"C:\Program Files\BioStar X\nginx\html\download"
            )
            src_path = Path(download_root) / filename

            # ---------- copy to Windows Downloads (optional) ----------
            copied_to: str = ""
            copy_status: str = "skipped"

            if copy_to_downloads:
                if platform.system().lower() == "windows":
                    if dest_dir_arg:
                        dest_dir = Path(dest_dir_arg)
                    elif target_username:
                        # Explicit username case
                        dest_dir = Path(fr"C:\Users\{target_username}\Downloads")
                    else:
                        # Current user profile
                        userprofile = os.environ.get("USERPROFILE") or str(Path.home())
                        dest_dir = Path(userprofile) / "Downloads"

                    try:
                        dest_dir.mkdir(parents=True, exist_ok=True)

                        if not src_path.exists():
                            copy_status = "failed: source not found"
                        else:
                            dest_path = dest_dir / filename
                            shutil.copy2(src_path, dest_path)
                            copied_to = str(dest_path)
                            copy_status = "success"
                    except Exception as ce:
                        logger.error(f"Copy to Downloads failed: {ce}")
                        copy_status = f"failed: {ce}"
                else:
                    copy_status = "skipped: non-windows"

            # ---------- final response ----------
            return self.success_response({
                "message": f"CSV file created successfully for {export_count_desc} users",
                "filename": filename,
                "download_url": f"/download/{filename}",
                "source_path": str(src_path),
                "copy": {
                    "enabled": copy_to_downloads,
                    "status": copy_status,
                    "destination": copied_to
                },
                "user_count": export_count_desc
            })

        except Exception as e:
            logger.error(f"Error in export_users_csv: {e}")
            return await self.handle_api_error(e)
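    # Hypothetical usage sketch for export_users_csv (argument names come from
    # the docstring and arg parsing above; the values are made up):
    #
    #     result = await handler.export_users_csv({
    #         "ids": ["*"],                # wildcard -> export all users
    #         "copy_to_downloads": True,
    #         "target_username": "alice",  # -> C:\Users\alice\Downloads
    #     })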
    async def export_non_accessed_users_csv(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Export users who have NOT accessed a specific door within a given time period.

        This is a 3-step process:
        1. Use the occupancy handler to get the list of users who accessed the door
        2. Get all users and filter out the ones who accessed
        3. Export only the non-accessed users to CSV
        """
        try:
            self.check_auth()

            door_id = args.get("door_id")
            door_name = args.get("door_name")
            days = args.get("days", 7)
            hours = args.get("hours")

            if not door_id and not door_name:
                return self.error_response("Either door_id or door_name is required")

            # Import OccupancyHandler to reuse its logic
            from .occupancy_handler import OccupancyHandler
            occupancy_handler = OccupancyHandler(self.session)
            occupancy_handler.client_session_id = self.client_session_id

            # Step 1: Get occupancy status to find users who DID access
            logger.info(f"Finding users who accessed door in last {days} days...")
            occupancy_args = {
                "door_id": door_id,
                "door_name": door_name,
                "days": days
            }
            # Only include hours if explicitly provided
            if hours is not None:
                occupancy_args["hours"] = hours

            occupancy_result = await occupancy_handler.get_occupancy_status(occupancy_args)

            # Parse the result to extract accessed user IDs
            accessed_user_ids = set()
            try:
                result_text = occupancy_result[0].text
                result_data = json.loads(result_text)

                # Extract the actual data from the response wrapper
                data = result_data.get("data", {})

                # Extract user IDs from currently_inside and recently_exited
                for user in data.get("currently_inside", []):
                    accessed_user_ids.add(str(user.get("user_id")))

                for user in data.get("recently_exited", []):
                    accessed_user_ids.add(str(user.get("user_id")))

                logger.info(f"Found {len(accessed_user_ids)} users who accessed the door")
                logger.info(f"Accessed user IDs: {sorted(accessed_user_ids)}")

            except Exception as e:
                logger.error(f"Failed to parse occupancy result: {e}")
                return self.error_response(f"Failed to analyze door access: {str(e)}")

            # Step 2: Get ALL users
            logger.info("Getting all users...")
            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.get(
                    f"{self.session.config.biostar_url}/api/users",
                    headers=headers
                )

            if response.status_code != 200:
                return self.error_response(f"Failed to get users: {response.status_code}")

            data = response.json()
            all_users = data.get("UserCollection", {}).get("rows", [])

            # Step 3: Filter to get NON-accessed users
            non_accessed_user_ids = []
            for user in all_users:
                user_id = str(user.get("user_id"))
                if user_id not in accessed_user_ids:
                    non_accessed_user_ids.append(user_id)

            logger.info(f"Found {len(non_accessed_user_ids)} non-accessed users out of {len(all_users)} total")

            if not non_accessed_user_ids:
                return self.success_response({
                    "message": "No non-accessed users found",
                    "total_users": len(all_users),
                    "accessed_users": len(accessed_user_ids),
                    "non_accessed_users": 0
                })

            # Step 4: Export only the non-accessed users
            logger.info(f"Exporting {len(non_accessed_user_ids)} non-accessed users to CSV...")
            export_args = {
                "ids": non_accessed_user_ids,
                "copy_to_downloads": args.get("copy_to_downloads", True),
                "dest_dir": args.get("dest_dir"),
                "target_username": args.get("target_username")
            }

            export_result = await self.export_users_csv(export_args)

            # Enhance the result with non-access context
            try:
                result_text = export_result[0].text
                result_data = json.loads(result_text)

                # Add analysis summary
                result_data["analysis"] = {
                    "door_name": door_name or f"Door {door_id}",
                    "period_days": days,
                    "total_users": len(all_users),
                    "accessed_users": len(accessed_user_ids),
                    "non_accessed_users": len(non_accessed_user_ids),
                    "percentage_non_accessed": round((len(non_accessed_user_ids) / len(all_users) * 100), 1) if all_users else 0
                }

                return [TextContent(type="text", text=json.dumps(result_data, ensure_ascii=False, indent=2))]

            except Exception as e:
                logger.warning(f"Could not enhance result: {e}")
                return export_result

        except Exception as e:
            logger.error(f"Error in export_non_accessed_users_csv: {e}")
            return await self.handle_api_error(e)
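    # Sketch of the filtering idea in step 3 above (hypothetical IDs). The loop
    # keeps the server's row order; a set difference would give the same members:
    #
    #     all_ids  = ["1", "2", "3", "4"]
    #     accessed = {"2", "4"}
    #     [uid for uid in all_ids if uid not in accessed]  # -> ["1", "3"]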
    async def get_user_cards(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Return the cards assigned to a user (GET /api/users/{id})."""
        try:
            self.check_auth()
            user_id = args.get("user_id")
            if not user_id:
                return self.error_response("user_id is required")

            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}
            async with httpx.AsyncClient(verify=False) as client:
                resp = await client.get(f"{self.session.config.biostar_url}/api/users/{user_id}", headers=headers)
                if resp.status_code != 200:
                    return self.error_response(f"Failed to get user: {resp.status_code} - {resp.text}")

            user = (resp.json() or {}).get("User", {}) or {}
            cards_field = user.get("cards", [])
            if isinstance(cards_field, dict) and "rows" in cards_field:
                rows = cards_field["rows"]
            elif isinstance(cards_field, list):
                rows = cards_field
            else:
                rows = []

            cards = [{"id": r.get("id"),
                      "card_id": r.get("card_id"),
                      "display_card_id": r.get("display_card_id")} for r in rows]

            return self.success_response({"user_id": str(user_id), "cards": cards, "total": len(cards)})
        except Exception as e:
            return await self.handle_api_error(e)
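    # The 'cards' field parsed above can come back in either of two shapes,
    # both of which get_user_cards normalizes (hypothetical values):
    #
    #     {"cards": {"rows": [{"id": "10", "card_id": "123"}]}}  # dict form
    #     {"cards": [{"id": "10", "card_id": "123"}]}            # list form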
    async def unassign_user_cards(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Detach some or all cards from a user via PUT /api/users/{id}."""
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            user_id = args.get("user_id")
            name = args.get("name")
            if not user_id:
                if not name:
                    return self.error_response("Either user_id or name is required")
                matches = await search_users_by_name(self.get_session_id(), name)
                if not matches:
                    return self.error_response(f"No user found with name '{name}'")
                if len(matches) > 1:
                    return self.error_response(f"Multiple users found with name '{name}'. Please specify user_id.")
                user_id = matches[0]["user_id"]
            user_id = str(user_id)

            async with httpx.AsyncClient(verify=False) as client:
                r = await client.get(f"{self.session.config.biostar_url}/api/users/{user_id}", headers=headers)
                if r.status_code != 200:
                    return self.error_response(f"Failed to get user: {r.status_code} - {r.text}")

            user = (r.json() or {}).get("User", {}) or {}
            cards_field = user.get("cards", [])
            if isinstance(cards_field, dict) and "rows" in cards_field:
                rows = cards_field["rows"]
            elif isinstance(cards_field, list):
                rows = cards_field
            else:
                rows = []

            state_before = [{"id": x.get("id"), "card_id": x.get("card_id"), "display_card_id": x.get("display_card_id")} for x in rows]
            assigned_row_ids = {str(x.get("id")) for x in rows if x.get("id") is not None}

            remove_all = bool(args.get("remove_all", False))
            ignore_missing = bool(args.get("ignore_missing", False))
            dry_run = bool(args.get("dry_run", False))

            if remove_all:
                planned_payload = {"User": {"cards": []}}
                if dry_run:
                    return self.success_response({
                        "message": "Dry run: would detach ALL cards",
                        "user_id": user_id,
                        "state_before": state_before,
                        "request_body": planned_payload
                    })
                async with httpx.AsyncClient(verify=False) as client:
                    put_resp = await client.put(f"{self.session.config.biostar_url}/api/users/{user_id}",
                                                headers=headers, json=planned_payload)
                    if put_resp.status_code != 200:
                        return self.error_response(f"Failed to unassign all cards: {put_resp.status_code} - {put_resp.text}",
                                                   {"request_body": planned_payload})
                    return self.success_response({
                        "message": f"Detached all cards from user {user_id}",
                        "user_id": user_id,
                        "removed_count": len(rows),
                        "request_body": planned_payload,
                        "response": put_resp.json()
                    })

            remove_ids = [str(x) for x in (args.get("remove_card_row_ids") or [])]
            if not remove_ids:
                return self.error_response("Provide remove_card_row_ids (or set remove_all=true)")

            missing = [rid for rid in remove_ids if rid not in assigned_row_ids]
            if missing and not ignore_missing:
                return self.error_response(
                    "One or more requested row ids are NOT assigned to the user. No changes applied.",
                    {"user_id": user_id, "missing": missing, "assigned_row_ids": sorted(list(assigned_row_ids))}
                )

            to_remove = set(remove_ids) & assigned_row_ids
            kept_rows = [x for x in rows if str(x.get("id")) not in to_remove]

            planned_payload = {"User": {"cards": [{"id": str(x.get("id"))} for x in kept_rows if x.get("id") is not None]}}

            if dry_run:
                return self.success_response({
                    "message": "Dry run: would detach selected cards",
                    "user_id": user_id,
                    "state_before": state_before,
                    "remove_targets": sorted(list(to_remove)),
                    "request_body": planned_payload,
                    "missing": missing,
                    "ignored_missing": ignore_missing
                })

            async with httpx.AsyncClient(verify=False) as client:
                put_resp = await client.put(f"{self.session.config.biostar_url}/api/users/{user_id}",
                                            headers=headers, json=planned_payload)
                if put_resp.status_code != 200:
                    return self.error_response(f"Failed to detach cards: {put_resp.status_code} - {put_resp.text}",
                                               {"request_body": planned_payload})

            return self.success_response({
                "message": f"Detached {len(to_remove)} card(s) from user {user_id}",
                "user_id": user_id,
                "removed": sorted(list(to_remove)),
                "kept_count": len(kept_rows),
                "request_body": planned_payload,
                "response": put_resp.json(),
                "missing": missing,
                "ignored_missing": ignore_missing
            })
        except Exception as e:
            return await self.handle_api_error(e)
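    # Detach semantics above: the PUT body replaces the user's card list, so it
    # carries the rows to KEEP, not the ones being removed (hypothetical ids):
    #
    #     rows       = [{"id": "10"}, {"id": "11"}, {"id": "12"}]
    #     remove ids = {"11"}
    #     PUT body   = {"User": {"cards": [{"id": "10"}, {"id": "12"}]}}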
    async def search_user_cards(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Behavior:
        - If user_id is provided (single or list), hydrate each via GET /api/users/{id} and parse cards.
        - Else build an advanced-search payload from args (or 'name' -> user_name) and call advanced_search_users().
          Then hydrate each matched user_id via GET /api/users/{id} and parse cards.
        - If require_has_card is True, exclude users with zero cards.
        - Limit hydration via max_users (default 50).
        """
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            # 0) helpers
            def _parse_cards(user_obj: Dict[str, Any]) -> List[Dict[str, Any]]:
                cards_field = user_obj.get("cards", [])
                if isinstance(cards_field, dict) and "rows" in cards_field:
                    rows = cards_field.get("rows", []) or []
                elif isinstance(cards_field, list):
                    rows = cards_field
                else:
                    rows = []
                cards = []
                for r in rows:
                    if not isinstance(r, dict):
                        continue
                    cards.append({
                        "id": r.get("id"),
                        "card_id": r.get("card_id"),
                        "display_card_id": r.get("display_card_id"),
                    })
                return cards

            require_has_card = bool(args.get("require_has_card", False))
            max_users = int(args.get("max_users", 50))

            # 1) Collect target user_ids
            target_ids: List[str] = []

            # 1-a) direct user_id(s)
            if "user_id" in args and args["user_id"] is not None:
                val = args["user_id"]
                if isinstance(val, list):
                    target_ids = [str(x) for x in val]
                else:
                    target_ids = [str(val)]

            # 1-b) advanced search (or simple 'name' convenience) if no direct ids
            if not target_ids:
                # Build advanced args from incoming args
                adv_keys = {
                    "limit", "offset", "user_id", "user_group_id", "user_group_ids",
                    "user_name", "user_email", "user_phone", "user_department",
                    "user_access_group_ids", "user_title", "order_by", "user_operator_level_id"
                }
                adv_args: Dict[str, Any] = {k: v for k, v in args.items() if k in adv_keys and v is not None}

                # convenience aliases
                if args.get("name") and not adv_args.get("user_name"):
                    adv_args["user_name"] = args.get("name")
                if args.get("user_id_like") and not adv_args.get("user_id"):
                    adv_args["user_id"] = args.get("user_id_like")

                # defaults
                if "limit" not in adv_args:
                    adv_args["limit"] = int(args.get("limit", 50))
                if "offset" in adv_args and isinstance(adv_args["offset"], int):
                    adv_args["offset"] = str(adv_args["offset"])
                if "offset" not in adv_args:
                    adv_args["offset"] = str(args.get("offset", "0"))
                if "user_operator_level_id" not in adv_args:
                    adv_args["user_operator_level_id"] = str(args.get("user_operator_level_id", "0"))

                # If there is nothing to search on, fail fast (prevents runaway full-table hydration)
                if not any(k for k in adv_args if k != "limit" and k != "offset" and k != "user_operator_level_id"):
                    return self.error_response(
                        "Provide 'user_id' or at least one advanced search field (e.g., user_name/user_email/user_phone/user_department)."
                    )

                # Reuse the existing advanced_search_users() for payload alignment and validation
                adv_result = await self.advanced_search_users(adv_args)
                if not adv_result:
                    return self.error_response("Advanced search returned no result content")

                try:
                    adv_json = json.loads(adv_result[0].text)
                except Exception as e:
                    return self.error_response(f"Failed to parse advanced search result: {e!r}")

                matched = adv_json.get("users", []) or []
                if not matched:
                    return self.success_response({
                        "message": "No users matched the search criteria",
                        "users": [],
                        "total_users": 0
                    })

                target_ids = [str(u.get("user_id")) for u in matched if u.get("user_id") is not None]

            # Cap the number of hydrated users
            target_ids = target_ids[:max_users]

            if not target_ids:
                return self.success_response({"message": "No target users to hydrate", "users": [], "total_users": 0})

            # 2) Hydrate each user and parse cards
            results: List[Dict[str, Any]] = []
            errors: List[Dict[str, Any]] = []

            async with httpx.AsyncClient(verify=False) as client:
                for uid in target_ids:
                    resp = await client.get(f"{self.session.config.biostar_url}/api/users/{uid}", headers=headers)
                    if resp.status_code != 200:
                        errors.append({"user_id": uid, "status": resp.status_code, "error": resp.text})
                        continue

                    user_obj = (resp.json() or {}).get("User", {}) or {}
                    cards = _parse_cards(user_obj)

                    if require_has_card and not cards:
                        continue

                    results.append({
                        "user_id": uid,
                        "name": user_obj.get("name"),
                        "email": user_obj.get("email"),
                        "department": user_obj.get("department"),
                        "card_count": len(cards),
                        "cards": cards
                    })

            return self.success_response({
                "message": f"Hydrated {len(results)} user(s)",
                "total_users": len(results),
                "users": results,
                "errors": errors,
                "require_has_card": require_has_card,
                "max_users": max_users
            })

        except Exception as e:
            return await self.handle_api_error(e)
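    # Hypothetical call sketch for search_user_cards: search-then-hydrate, with
    # zero-card users filtered out (names and values are made up):
    #
    #     await handler.search_user_cards({
    #         "user_name": "kim",        # advanced-search field
    #         "require_has_card": True,  # drop users holding no card
    #         "max_users": 20,           # cap GET /api/users/{id} calls
    #     })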
    async def create_user_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Create a new User Group.
        - Blocks creation if the name already exists (case-insensitive, trimmed).
        - If 'depth' is not provided, auto-compute it as (parent.depth + 1).
        - Enforces name length <= 48 and max depth < 8 (root=0).
        Body sent to BioStar 2:
        {
            "UserGroup": {
                "parent_id": {"id": <int>},
                "depth": <int>,
                "name": "<str>"
            }
        }
        """
        try:
            self.check_auth()

            # ----- basic validation -----
            name = (args.get("name") or "").strip()
            parent_id = args.get("parent_id")
            if not name:
                return self.error_response("Parameter 'name' is required")
            if parent_id is None:
                return self.error_response("Parameter 'parent_id' is required")
            if len(name) > 48:
                return self.error_response("User group name must be 48 characters or fewer")

            # normalize parent_id for compare/payload
            parent_id_str = str(parent_id)
            try:
                parent_id_int = int(str(parent_id).strip())
            except Exception:
                return self.error_response("Parameter 'parent_id' must be a number")

            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            # ----- fetch existing groups to check duplicates & compute depth -----
            async with httpx.AsyncClient(verify=False) as client:
                list_resp = await client.get(f"{self.session.config.biostar_url}/api/user_groups", headers=headers)
                if list_resp.status_code != 200:
                    return self.error_response(f"Failed to list user groups: {list_resp.status_code} - {list_resp.text}")

            data = list_resp.json() or {}
            rows = (data.get("UserGroupCollection") or {}).get("rows", []) or []

            # duplicate check (case-insensitive, trimmed)
            target_norm = name.lower()
            dup_matches = [
                {"id": r.get("id"), "name": r.get("name")}
                for r in rows
                if isinstance(r, dict) and str(r.get("name", "")).strip().lower() == target_norm
            ]
            if dup_matches:
                return self.error_response(
                    f"User group '{name}' already exists. Please choose a different name.",
                    {"conflicts": dup_matches}
                )

            # ----- resolve depth -----
            depth_arg = args.get("depth")
            if depth_arg is not None:
                try:
                    depth_val = int(depth_arg)
                except Exception:
                    return self.error_response("Parameter 'depth' must be an integer when provided")
            else:
                # find parent depth; 'All Users' typically has depth=0
                parent_row = next((r for r in rows if str(r.get("id")) == parent_id_str), None)
                if parent_row and "depth" in parent_row:
                    try:
                        parent_depth = int(str(parent_row.get("depth")))
                    except Exception:
                        parent_depth = 0
                else:
                    # fallback: parent not found in the listing; assume a
                    # root-level parent (depth=0), including the root group (id=1)
                    parent_depth = 0
                depth_val = parent_depth + 1

            # enforce max 8 levels total (root=0 → allowed children depths: 1..7)
            if depth_val >= 8:
                return self.error_response(
                    f"Max depth is 7 for child groups (root=0). Computed depth={depth_val} exceeds limit."
                )

            # ----- build & send create payload -----
            payload = {
                "UserGroup": {
                    "parent_id": {"id": parent_id_int},
                    "depth": depth_val,
                    "name": name
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                create_resp = await client.post(
                    f"{self.session.config.biostar_url}/api/user_groups",
                    headers=headers,
                    json=payload
                )

            if create_resp.status_code != 200:
                return self.error_response(
                    f"Failed to create user group: {create_resp.status_code} - {create_resp.text}",
                    {"request_body": payload}
                )

            cj = create_resp.json() or {}
            created = (cj.get("UserGroup") or {}) if "UserGroup" in cj else cj

            return self.success_response({
                "message": f"User group '{name}' created successfully",
                "created": {
                    "id": created.get("id"),
                    "name": created.get("name", name),
                    "parent_id": parent_id_int,
                    "depth": depth_val
                }
            })

        except Exception as e:
            return await self.handle_api_error(e)
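    # Example payload built above for a child of the root group (id=1, depth=0);
    # 'Contractors' is a hypothetical name, and depth = parent.depth + 1:
    #
    #     {"UserGroup": {"parent_id": {"id": 1}, "depth": 1, "name": "Contractors"}}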
    async def get_user_groups(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """List all user groups (GET /api/user_groups)."""
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            async with httpx.AsyncClient(verify=False) as client:
                resp = await client.get(f"{self.session.config.biostar_url}/api/user_groups", headers=headers)

            if resp.status_code != 200:
                return self.error_response(f"Failed to list user groups: {resp.status_code} - {resp.text}")

            data = resp.json() or {}
            coll = (data.get("UserGroupCollection") or {})
            rows = coll.get("rows", []) or []
            total = coll.get("total")

            def _children(r):
                ch = r.get("user_groups", []) or []
                out = []
                for c in ch:
                    if isinstance(c, dict):
                        out.append({"id": str(c.get("id")), "name": c.get("name")})
                return out

            groups = []
            for r in rows:
                if not isinstance(r, dict):
                    continue
                item = {
                    "id": str(r.get("id")),
                    "name": r.get("name"),
                    "description": r.get("description", ""),
                    "depth": str(r.get("depth")) if r.get("depth") is not None else None,
                    "inherited": str(r.get("inherited")) if r.get("inherited") is not None else None,
                    "user_count": r.get("user_count"),
                }
                if isinstance(r.get("parent_id"), dict):
                    item["parent_id"] = {
                        "id": str(r["parent_id"].get("id")),
                        "name": r["parent_id"].get("name")
                    }
                item["user_groups"] = _children(r)
                groups.append(item)

            try:
                total_int = int(total) if total is not None else len(groups)
            except Exception:
                total_int = len(groups)

            return self.success_response({
                "message": f"Found {total_int} user group(s)",
                "total": total_int,
                "groups": groups
            })
        except Exception as e:
            return await self.handle_api_error(e)
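    # Shape of one normalized entry returned by get_user_groups (hypothetical
    # values; ids and depth are stringified, children reduced to id/name pairs):
    #
    #     {"id": "1", "name": "All Users", "description": "", "depth": "0",
    #      "inherited": "false", "user_count": 42,
    #      "user_groups": [{"id": "2", "name": "HQ"}]}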
    # ---------- helpers (internal) ----------
    async def _fetch_user_detail(self, headers: Dict[str, str], user_id: str) -> Optional[Dict[str, Any]]:
        """GET /api/users/{id} and return 'User' object or None."""
        async with httpx.AsyncClient(verify=False) as client:
            r = await client.get(f"{self.session.config.biostar_url}/api/users/{user_id}", headers=headers)
            if r.status_code != 200:
                return None
            return (r.json() or {}).get("User") or {}

    async def _resolve_user(self, headers: Dict[str, str], user_id: Optional[Any], name: Optional[str]):
        """
        Resolve a user by user_id or name with 0/1/many branching.
        - If user_id provided → verify existence.
        - Else use search_users_by_name() to handle 0/1/many matches.
        """
        if user_id is not None:
            uid = str(user_id)
            detail = await self._fetch_user_detail(headers, uid)
            if not detail:
                return {"error": f"User '{uid}' not found"}
            return {"user_id": uid, "user": detail}

        nm = (name or "").strip()
        if not nm:
            return {"error": "Either 'user_id' or 'name' is required"}

        matches = await search_users_by_name(self.get_session_id(), nm)
        if not matches:
            return {"error": f"No user found with name '{nm}'"}
        if len(matches) > 1:
            return {
                "needs_selection": True,
                "type": "user",
                "message": f"Multiple users matched for '{nm}'. Which one?",
                "prompt": "Which user?",
                "candidates": [{"user_id": str(m.get("user_id")), "name": m.get("name")} for m in matches]
            }
        uid = str(matches[0]["user_id"])
        detail = await self._fetch_user_detail(headers, uid)
        if not detail:
            return {"error": f"User '{uid}' not found"}
        return {"user_id": uid, "user": detail}

    async def _list_user_groups_rows(self, headers: Dict[str, str]) -> List[Dict[str, Any]]:
        """GET /api/user_groups and return raw rows."""
        async with httpx.AsyncClient(verify=False) as client:
            r = await client.get(f"{self.session.config.biostar_url}/api/user_groups", headers=headers)
            if r.status_code != 200:
                return []
            data = r.json() or {}
            return (data.get("UserGroupCollection") or {}).get("rows", []) or []

    def _flatten_groups_basic(self, rows: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """
        Flatten top-level rows and their children (id, name, depth, parent_id if possible).
        """
        flat: Dict[str, Dict[str, Any]] = {}
        # top-level rows
        for g in rows:
            gid = g.get("id")
            if gid is None:
                continue
            key = str(gid)
            flat[key] = {
                "id": int(gid),
                "name": g.get("name"),
                "depth": g.get("depth"),
                "parent_id": (g.get("parent_id") or {}).get("id") if isinstance(g.get("parent_id"), dict) else g.get("parent_id"),
            }
        # children
        for g in rows:
            try:
                base_depth = int(str(g.get("depth"))) if g.get("depth") is not None else 0
            except Exception:
                base_depth = 0
            for ch in (g.get("user_groups") or []):
                cid = ch.get("id")
                if cid is None:
                    continue
                ckey = str(cid)
                if ckey not in flat:
                    flat[ckey] = {
                        "id": int(cid),
                        "name": ch.get("name"),
                        "depth": base_depth + 1,
                        "parent_id": g.get("id"),
                    }
        return list(flat.values())

    def _filter_groups_by_name(self, rows: List[Dict[str, Any]], query: str) -> List[Dict[str, Any]]:
        """Case-insensitive substring match on group name."""
        q = " ".join((query or "").lower().split())
        out = []
        for r in rows:
            nm = " ".join(str(r.get("name") or "").lower().split())
            if q in nm:
                out.append(r)
        return out
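    # Sketch of _flatten_groups_basic on a hypothetical two-level collection:
    #
    #     rows = [{"id": 1, "name": "All Users", "depth": 0,
    #              "user_groups": [{"id": 2, "name": "HQ"}]}]
    #     # -> [{"id": 1, "name": "All Users", "depth": 0, "parent_id": None},
    #     #     {"id": 2, "name": "HQ", "depth": 1, "parent_id": 1}]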
    # ---------- add user to a user group ----------
    async def add_user_to_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Assign a user to a specific user group via PUT /api/users/{user_id}.
        Flow:
        1) Resolve user (id or name) with 0/1/many.
        2) Resolve target group (id or name/search) with 0/1/many.
        3) If same as current group -> no-op.
        4) PUT with preserved name/start/expiry fields.
        """
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            # 1) resolve user
            user_res = await self._resolve_user(headers, args.get("user_id"), args.get("name"))
            if "error" in user_res:
                return self.error_response(user_res["error"])
            if user_res.get("needs_selection"):
                return self.success_response(user_res)
            user_id = user_res["user_id"]
            user_obj = user_res["user"]

            # 2) resolve user group
            rows = await self._list_user_groups_rows(headers)
            flat = self._flatten_groups_basic(rows)
            id_set = {str(g["id"]) for g in flat}

            target_gid: Optional[int] = None
            if args.get("user_group_id") is not None:
                gid_str = str(args.get("user_group_id")).strip()
                if gid_str not in id_set:
                    return self.success_response({
                        "status": "user_group_not_found",
                        "needs_selection": True,
                        "message": f"User group id '{gid_str}' not found. Please pick one.",
                        "prompt": "Which user group?",
                        "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                    })
                target_gid = int(gid_str)
            else:
                gq = (args.get("group_name") or args.get("group_search_text") or "").strip()
                if not gq:
                    return self.success_response({
                        "status": "user_group_required",
                        "needs_selection": True,
                        "message": "Please specify a user group to assign.",
                        "prompt": "Which user group?",
                        "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                    })
                matches = self._filter_groups_by_name(flat, gq)
                if len(matches) == 0:
                    return self.success_response({
                        "status": "user_group_not_found",
                        "needs_selection": True,
                        "message": f"No user group matched '{gq}'. Please pick one below.",
                        "prompt": "Which user group?",
                        "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                    })
                if len(matches) > 1:
                    return self.success_response({
                        "status": "ambiguous_user_group",
                        "needs_selection": True,
                        "message": f"Multiple user groups matched '{gq}'. Which one?",
                        "prompt": "Which user group?",
                        "candidates": [{"id": g["id"], "name": g["name"]} for g in matches]
                    })
                target_gid = int(matches[0]["id"])

            # 3) skip if already in the target group
            cur_gid = None
            ug = user_obj.get("user_group_id")
            if isinstance(ug, dict) and ug.get("id") is not None:
                try:
                    cur_gid = int(str(ug.get("id")))
                except Exception:
                    cur_gid = None
            if cur_gid == target_gid:
                return self.success_response({
                    "message": f"User {user_id} is already in group {target_gid}.",
                    "user_id": user_id,
                    "current_group_id": cur_gid,
                    "target_group_id": target_gid,
                    "changed": False
                })

            # 4) PUT (preserve required fields)
            name_val = user_obj.get("name") or args.get("new_name") or args.get("name") or ""
            start_val = user_obj.get("start_datetime") or "2001-01-01T00:00:00.00Z"
            expiry_val = user_obj.get("expiry_datetime") or "2030-12-31T23:59:00.00Z"

            payload = {
                "User": {
                    "name": name_val,
                    "user_group_id": {"id": target_gid},
                    "start_datetime": start_val,
                    "expiry_datetime": expiry_val
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                pr = await client.put(f"{self.session.config.biostar_url}/api/users/{user_id}",
                                      headers=headers, json=payload)

            if pr.status_code != 200:
                return self.error_response(
                    f"Failed to update user: {pr.status_code} - {pr.text}",
                    {"request_body": payload}
                )

            return self.success_response({
                "message": f"User {user_id} has been assigned to group {target_gid}.",
                "user_id": user_id,
                "before_group_id": cur_gid,
                "after_group_id": target_gid,
                "request_body": payload
            })

        except Exception as e:
            return await self.handle_api_error(e)
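    # When the group query is ambiguous, add_user_to_group returns a selection
    # prompt instead of guessing (hypothetical candidates):
    #
    #     {"status": "ambiguous_user_group", "needs_selection": True,
    #      "prompt": "Which user group?",
    #      "candidates": [{"id": 3, "name": "Sales KR"}, {"id": 7, "name": "Sales US"}]}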
    # ---------- remove user from a user group (move elsewhere) ----------
    async def remove_user_from_group(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Exclude a user from a given group by moving them to a destination group via PUT /api/users/{user_id}.
        Behavior:
        - If 'from_*' is specified, the user must currently belong to that group.
        - Destination resolution priority:
          1) destination_group_id
          2) destination_group_name/search_text (0/1/many)
          3) parent of the current group → if none, fall back to 1 (All Users)
        """
        try:
            self.check_auth()
            headers = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}

            # 1) resolve user
            user_res = await self._resolve_user(headers, args.get("user_id"), args.get("name"))
            if "error" in user_res:
                return self.error_response(user_res["error"])
            if user_res.get("needs_selection"):
                return self.success_response(user_res)
            user_id = user_res["user_id"]
            user_obj = user_res["user"]

            # current group
            cur_gid = None
            cur_gname = None
            ug = user_obj.get("user_group_id")
            if isinstance(ug, dict):
                cur_gid = ug.get("id")
                cur_gname = ug.get("name")
            try:
                cur_gid = int(str(cur_gid)) if cur_gid is not None else None
            except Exception:
                cur_gid = None

            # 2) groups index
            rows = await self._list_user_groups_rows(headers)
            flat = self._flatten_groups_basic(rows)
            id_set = {str(g["id"]) for g in flat}
            by_id = {int(g["id"]): g for g in flat}

            # 3) optional from-group validation
            if args.get("from_user_group_id") is not None or args.get("from_group_name") or args.get("from_group_search_text"):
                if args.get("from_user_group_id") is not None:
                    fid_str = str(args.get("from_user_group_id")).strip()
                    if fid_str not in id_set:
                        return self.success_response({
                            "status": "user_group_not_found",
                            "needs_selection": True,
                            "message": f"From-group id '{fid_str}' not found. Please pick one.",
                            "prompt": "Which user group?",
                            "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                        })
                    from_gid = int(fid_str)
                else:
                    fq = (args.get("from_group_name") or args.get("from_group_search_text") or "").strip()
                    matches = self._filter_groups_by_name(flat, fq)
                    if len(matches) == 0:
                        return self.success_response({
                            "status": "user_group_not_found",
                            "needs_selection": True,
                            "message": f"No user group matched '{fq}'. Please pick one below.",
                            "prompt": "Which user group?",
                            "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                        })
                    if len(matches) > 1:
                        return self.success_response({
                            "status": "ambiguous_user_group",
                            "needs_selection": True,
                            "message": f"Multiple user groups matched '{fq}'. Which one?",
                            "prompt": "Which user group?",
                            "candidates": [{"id": g["id"], "name": g["name"]} for g in matches]
                        })
                    from_gid = int(matches[0]["id"])

                if cur_gid is None or cur_gid != from_gid:
                    return self.error_response(
                        "User is not currently in the specified 'from' group.",
                        {"current_group_id": cur_gid, "current_group_name": cur_gname, "from_group_id": from_gid}
                    )

            # 4) resolve destination group
            dest_gid: Optional[int] = None
            if args.get("destination_group_id") is not None:
                dg_str = str(args.get("destination_group_id")).strip()
                if dg_str not in id_set:
                    return self.success_response({
                        "status": "user_group_not_found",
                        "needs_selection": True,
                        "message": f"Destination group id '{dg_str}' not found. Please pick one.",
                        "prompt": "Which user group?",
                        "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                    })
                dest_gid = int(dg_str)
            elif (args.get("destination_group_name") or args.get("destination_group_search_text")):
                dq = (args.get("destination_group_name") or args.get("destination_group_search_text") or "").strip()
                matches = self._filter_groups_by_name(flat, dq)
                if len(matches) == 0:
                    return self.success_response({
                        "status": "user_group_not_found",
                        "needs_selection": True,
                        "message": f"No user group matched '{dq}'. Please pick one below.",
                        "prompt": "Which user group?",
                        "user_groups": [{"id": g["id"], "name": g["name"]} for g in flat]
                    })
                if len(matches) > 1:
                    return self.success_response({
                        "status": "ambiguous_user_group",
                        "needs_selection": True,
                        "message": f"Multiple user groups matched '{dq}'. Which one?",
                        "prompt": "Which user group?",
                        "candidates": [{"id": g["id"], "name": g["name"]} for g in matches]
                    })
                dest_gid = int(matches[0]["id"])
            else:
                # fallback: parent → if missing, move to 1 (All Users)
                if cur_gid is not None and cur_gid in by_id:
                    parent_id = by_id[cur_gid].get("parent_id")
                    try:
                        dest_gid = int(str(parent_id)) if parent_id is not None else 1
                    except Exception:
                        dest_gid = 1
                else:
                    dest_gid = 1

            # 5) no-op if same
            if cur_gid == dest_gid:
                return self.success_response({
                    "message": f"Destination group {dest_gid} equals current group; no change applied.",
                    "user_id": user_id,
                    "current_group_id": cur_gid,
                    "target_group_id": dest_gid,
                    "changed": False
                })

            # 6) PUT (preserve required fields)
            name_val = user_obj.get("name") or ""
            start_val = user_obj.get("start_datetime") or "2001-01-01T00:00:00.00Z"
            expiry_val = user_obj.get("expiry_datetime") or "2030-12-31T23:59:00.00Z"

            payload = {
                "User": {
                    "name": name_val,
                    "user_group_id": {"id": dest_gid},
                    "start_datetime": start_val,
                    "expiry_datetime": expiry_val
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                pr = await client.put(f"{self.session.config.biostar_url}/api/users/{user_id}",
                                      headers=headers, json=payload)

            if pr.status_code != 200:
                return self.error_response(
                    f"Failed to update user: {pr.status_code} - {pr.text}",
                    {"request_body": payload}
                )

            return self.success_response({
                "message": f"User {user_id} has been moved from group {cur_gid} to {dest_gid}.",
                "user_id": user_id,
                "before_group_id": cur_gid,
                "after_group_id": dest_gid,
                "request_body": payload
            })

        except Exception as e:
            return await self.handle_api_error(e)
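    # Destination fallback in remove_user_from_group, sketched with hypothetical
    # groups: a user in 'HQ > Sales' (Sales.parent_id = HQ) moves up to HQ when
    # no destination is given; if the current group has no known parent, the
    # user falls back to group 1 ('All Users').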
    async def bulk_edit_users(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Bulk update multiple users selected by 'adv_criteria' via:
            PUT {biostar_url}/api/users?adv=mode1

        Inputs:
        - adv_criteria: dict (required)
        - (optional) new_user_group_id OR new_user_group (object)
        - (optional) start_datetime, expiry_datetime
        - (optional) access_group_ids OR access_groups, or clear_access_groups=True
        - dry_run: if true, return the payload only
        - timeout: request timeout in seconds

        Behavior:
        - Builds the 'User' patch from the provided fields.
        - Enforces at least one change.
        - If dry_run: returns the planned payload without an HTTP call.
        - Otherwise: PUT to the endpoint with the 'bs-session-id' header.
        """
        try:
            self.check_auth()

            # Validate args with Pydantic
            try:
                input_data = BulkEditUsersInput(**args)
            except Exception as e:
                return self.handle_validation_error(e)

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            # Build 'User' patch object
            user_patch: Dict[str, Any] = {}

            # 1) user_group_id (either minimal id form or pass-through object)
            if input_data.new_user_group is not None:
                # Pass-through as provided (e.g., {"id": "4036", "name": "ACE"})
                user_patch["user_group_id"] = input_data.new_user_group
            elif input_data.new_user_group_id is not None:
                user_patch["user_group_id"] = {"id": str(input_data.new_user_group_id)}

            # 2) period (start/expiry)
            if input_data.start_datetime is not None:
                user_patch["start_datetime"] = input_data.start_datetime
            if input_data.expiry_datetime is not None:
                user_patch["expiry_datetime"] = input_data.expiry_datetime

            # 3) access_groups
            if input_data.clear_access_groups:
                user_patch["access_groups"] = []
            elif input_data.access_groups is not None:
                # Full objects pass-through (may contain accessLevels, users, etc.)
                user_patch["access_groups"] = input_data.access_groups
            elif input_data.access_group_ids is not None:
                # Minimal id-only list
                user_patch["access_groups"] = [{"id": str(gid)} for gid in input_data.access_group_ids]

            # Sanity: ensure we actually have changes (should be guaranteed by the validator)
            if not user_patch:
                return self.error_response(
                    "No changes to apply. Provide at least one of group/period/access_groups."
                )

            payload: Dict[str, Any] = {
                "adv_criteria": input_data.adv_criteria,
                "User": user_patch
            }

            # Target the configured server rather than a hardcoded host
            endpoint = f"{self.session.config.biostar_url}/api/users?adv=mode1"

            # Dry-run: return the planned payload without calling the API
            if input_data.dry_run:
                return self.success_response({
                    "message": "Dry run: no API call was made.",
                    "request_body": payload,
                    "endpoint": endpoint,
                    "method": "PUT"
                })

            # Execute the PUT (verify=False for internal/self-signed TLS)
            async with httpx.AsyncClient(verify=False, timeout=input_data.timeout) as client:
                resp = await client.put(endpoint, headers=headers, json=payload)

            if resp.status_code != 200:
                # Return the API error with the request body for troubleshooting
                return self.error_response(
                    f"Bulk edit failed: {resp.status_code} - {resp.text}",
                    {"request_body": payload}
                )

            # Try to parse JSON; if not JSON, return raw text
            try:
                resp_json = resp.json()
            except Exception:
                resp_json = {"raw": resp.text}

            return self.success_response({
                "message": "Bulk edit applied successfully.",
                "endpoint": endpoint,
                "request_body": payload,
                "response": resp_json
            })

        except Exception as e:
            return await self.handle_api_error(e)
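    # Hypothetical request body for bulk_edit_users: move everyone matching the
    # criteria into group 4036 and extend their validity period in one PUT:
    #
    #     {"adv_criteria": {"user_group_ids": ["10"]},
    #      "User": {"user_group_id": {"id": "4036"},
    #               "expiry_datetime": "2030-12-31T23:59:00.00Z"}}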

    # -----------------------------------------
    # Helper: write CSV into nginx/download dir
    # -----------------------------------------
    async def _write_csv_to_download_dir(self, filename: str, data: bytes) -> Dict[str, Any]:
        DOWNLOAD_ROOT = r"C:\Program Files\BioStar X\nginx\html\download"
        if not filename:
            return {"error": "Missing filename."}
        basename = Path(filename).name
        target_path = Path(DOWNLOAD_ROOT) / basename
        try:
            os.makedirs(target_path.parent, exist_ok=True)
            with open(target_path, "wb") as f:
                f.write(data)
        except PermissionError:
            return {"error": f"Permission denied writing: {str(target_path)}"}
        except Exception as e:
            return {"error": f"Failed to write CSV: {e}"}
        return {"path": str(target_path), "basename": basename}
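
    # A minimal usage sketch (hypothetical filename): callers of the helper above
    # branch on the "error" key of its return value.
    #
    #     w = await self._write_csv_to_download_dir("users.csv", b"name,email\n")
    #     if "error" in w:
    #         ...  # report the failure
    #     # else: w == {"path": "C:\\...\\download\\users.csv", "basename": "users.csv"}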

    # -------------------------------------------------
    # NEW: try to locate CSV bytes from multiple places
    # -------------------------------------------------
    async def _load_csv_bytes_from_args(self, args: Dict[str, Any], *, timeout: int) -> Dict[str, Any]:
        """
        Returns:
            { "bytes": <bytes>, "basename": <str> } or { "error": "..." }

        Priority:
            1) file_base64 / file_text / file_bytes (raw bytes provided directly)
            2) file_path (if it is a URL, download it; otherwise search the local path and candidate folders)
        """
        # 1) explicit content
        file_base64 = args.get("file_base64")
        file_text = args.get("file_text")
        file_bytes = args.get("file_bytes")

        preferred_name = (
            (args.get("original_file_name") or "")
            or (args.get("fileName") or "")
            or (args.get("file_path") or "")
            or (args.get("uri") or "")
            or "upload.csv"
        )
        basename = Path(preferred_name).name or "upload.csv"

        if file_base64 is not None:
            try:
                b = base64.b64decode(file_base64)
            except Exception:
                return {"error": "Invalid 'file_base64'. Base64 decode failed."}
            return {"bytes": b, "basename": basename}

        if file_text is not None:
            charset = str(args.get("charset", "utf-8-sig"))
            try:
                b = str(file_text).encode(charset or "utf-8")
            except Exception:
                return {"error": "Encoding 'file_text' failed."}
            return {"bytes": b, "basename": basename}

        if file_bytes is not None:
            if isinstance(file_bytes, list):
                try:
                    b = bytes(file_bytes)
                except Exception:
                    return {"error": "Invalid 'file_bytes' list."}
            elif isinstance(file_bytes, (bytes, bytearray)):
                b = bytes(file_bytes)
            else:
                return {"error": "Invalid 'file_bytes'. Provide bytes/bytearray or list[int]."}
            return {"bytes": b, "basename": basename}

        # 2) fallback: file_path as URL or local path (+ smart search)
        file_path = str(args.get("file_path") or "").strip()
        if file_path:
            # URL?
            if re.match(r"^https?://", file_path, flags=re.I):
                try:
                    with httpx.Client(verify=False, timeout=timeout) as sclient:
                        r = sclient.get(file_path)
                        if r.status_code != 200:
                            return {"error": f"Failed to download CSV from URL (HTTP {r.status_code})."}
                        # use the remote filename if available
                        remote_name = Path(file_path).name or basename
                        return {"bytes": r.content, "basename": Path(remote_name).name}
                except Exception as e:
                    return {"error": f"Failed to download CSV from URL: {e}"}

            # Local path exact
            p = Path(file_path)
            if p.exists():
                try:
                    return {"bytes": p.read_bytes(), "basename": p.name}
                except Exception as e:
                    return {"error": f"Failed to read local CSV: {e}"}

            # Smart search candidates (so a bare filename such as 'User_....csv' is also handled)
            candidates: List[Path] = []
            # 1) configured inbox
            inbox = getattr(getattr(self, "session", None), "config", None)
            inbox_dir = getattr(inbox, "upload_inbox_dir", None)
            if inbox_dir:
                candidates.append(Path(inbox_dir) / Path(file_path).name)
            # 2) common MCP/Notebook mount
            candidates.append(Path("/mnt/data") / Path(file_path).name)
            # 3) current working dir
            candidates.append(Path.cwd() / Path(file_path).name)
            # 4) script dir (if available)
            try:
                candidates.append(Path(__file__).parent / Path(file_path).name)  # type: ignore
            except Exception:
                pass
            # 5) user Downloads (best-effort)
            try:
                candidates.append(Path.home() / "Downloads" / Path(file_path).name)
            except Exception:
                pass

            for c in candidates:
                try:
                    if c.exists():
                        return {"bytes": c.read_bytes(), "basename": Path(file_path).name}
                except Exception:
                    continue

            return {"error": "Local 'file_path' not found in known locations. "
                             "Provide 'file_base64'/'file_text'/'file_bytes' or an 'http(s)://...' URL."}

        return {"error": "CSV content is required. "
                         "Provide 'file_base64', 'file_text', 'file_bytes', or a 'file_path' that resolves to a real file/URL."}
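
    # Illustrative sketch (hypothetical values): the three ways a caller can hand CSV
    # content to the loader above, checked in this priority order.
    #
    #     {"file_base64": "bmFtZSxlbWFpbA==", "original_file_name": "users.csv"}
    #     {"file_text": "name,email\nKim,kim@example.com", "charset": "utf-8-sig"}
    #     {"file_path": "https://host/download/users.csv"}  # URL, full path, or bare filename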

    # ------------------------------------------------------------
    # import_users_csv — write to download, preflight, then import
    # ------------------------------------------------------------
    async def import_users_csv(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        r"""
        No /api/attachments.
        1) Load CSV bytes (base64 / text / bytes / URL / smart local search incl. /mnt/data).
        2) Save to: C:\Program Files\BioStar X\nginx\html\download\<basename>
        3) Preflight: GET /download/<basename>
        4) POST /api/users/csv_import with File.uri=<basename>, import_option=2
        """
        try:
            self.check_auth()

            DOWNLOAD_ROOT = r"C:\Program Files\BioStar X\nginx\html\download"
            import_option = 2  # fixed; any caller-supplied value is ignored
            args.pop("import_option", None)

            start_line = int(args.get("start_line", 2))
            timeout = int(args.get("timeout", 60))
            headers_http = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json",
            }

            # Load bytes (now with /mnt/data and other fallbacks)
            loaded = await self._load_csv_bytes_from_args(args, timeout=timeout)
            if "error" in loaded:
                return self.error_response(loaded["error"])
            csv_bytes: bytes = loaded["bytes"]
            basename: str = loaded["basename"]

            # Write to nginx/download
            w = await self._write_csv_to_download_dir(basename, csv_bytes)
            if "error" in w:
                return self.error_response(w["error"])

            # Preflight (a few retries for disk/anti-virus lag)
            ok = False
            last = None
            for _ in range(4):
                try:
                    with httpx.Client(verify=False, timeout=timeout) as sclient:
                        r = sclient.get(f"{self.session.config.biostar_url}/download/{basename}", headers=headers_http)
                        last = r.status_code
                        if r.status_code == 200:
                            ok = True
                            break
                except Exception:
                    await asyncio.sleep(0.25)
                    continue
                await asyncio.sleep(0.25)
            if not ok:
                return self.error_response(
                    f"CSV not served at /download/{basename} (HTTP {last}). "
                    f"Check permissions on: {DOWNLOAD_ROOT}\\{basename}"
                )

            # Columns
            status, j, _ = await self._http_get_json(
                f"{self.session.config.biostar_url}/api/users/csv_option", headers_http, timeout
            )
            supported_cols: List[str] = []
            if status == 200 and isinstance(j, dict):
                supported_cols = ((j.get("CsvOption") or {}).get("columns") or {}).get("rows", []) or []

            if args.get("columns"):
                columns = list(args["columns"])
            else:
                columns = list(supported_cols) if supported_cols else await self._get_users_csv_columns(headers_http, timeout)

            if args.get("headers"):
                headers_map = list(args["headers"])
            else:
                headers_map = list(columns)

            if len(headers_map) != len(columns):
                return self.error_response(
                    f"Length mismatch: headers({len(headers_map)}) vs columns({len(columns)})."
                )

            removed_unsupported: List[str] = []
            if supported_cols:
                f_cols, f_hdrs = [], []
                for c, h in zip(columns, headers_map):
                    if c in supported_cols:
                        f_cols.append(c)
                        f_hdrs.append(h)
                    else:
                        removed_unsupported.append(c)
                columns, headers_map = f_cols, f_hdrs
                if not columns:
                    return self.error_response("All requested columns are unsupported by the server.")

            file_info = {
                "uri": basename,  # IMPORTANT: basename only
                "fileName": basename,
                "originalFileName": Path(args.get("original_file_name") or basename).name,
                "type": "",
            }
            csv_option_block = {
                "start_line": start_line,
                "import_option": import_option,
                "columns": {
                    "total": str(len(columns)),
                    "rows": columns,
                },
            }
            query_block = {"headers": headers_map, "columns": columns}
            body: Dict[str, Any] = {
                "File": file_info,
                "CsvOption": csv_option_block,
                "Query": query_block,
                "currentPhoneBook": 0,
                "filterdUserIds": [],  # (sic) field spelling kept as the API uses it
            }
            data_rows = args.get("data_rows")
            if isinstance(data_rows, list) and data_rows:
                body["Data"] = data_rows

            async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
                resp = await client.post(
                    f"{self.session.config.biostar_url}/api/users/csv_import",
                    headers=headers_http,
                    json=body,
                )

            if resp.status_code != 200:
                return self.error_response(
                    f"CSV import failed: {resp.status_code} - {resp.text}",
                    {
                        "request_body": {
                            "File": file_info,
                            "CsvOption": csv_option_block,
                            "Query": query_block,
                            **({"removed_unsupported": removed_unsupported} if removed_unsupported else {}),
                            "server_path": str(Path(DOWNLOAD_ROOT) / basename),
                        }
                    },
                )

            try:
                resp_json = resp.json()
            except Exception:
                resp_json = {"raw": resp.text}

            summary = {
                "message": "CSV import request accepted.",
                "endpoint": "/api/users/csv_import",
                "file": file_info,
                "csv_option": {"start_line": start_line, "import_option": import_option},
                "columns_total": len(columns),
                "used_fields": [h for h in headers_map if h],
                "skipped_fields": [c for (c, h) in zip(columns, headers_map) if not h],
                **({"removed_unsupported": removed_unsupported} if removed_unsupported else {}),
                "response": resp_json,
                "server_path": str(Path(DOWNLOAD_ROOT) / basename),
            }
            return self.success_response(summary)

        except Exception as e:
            return await self.handle_api_error(e)
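
    # Illustrative sketch of the body POSTed to /api/users/csv_import above
    # (hypothetical two-column mapping; the real column list comes from /api/users/csv_option):
    #
    #     {
    #         "File": {"uri": "users.csv", "fileName": "users.csv",
    #                  "originalFileName": "users.csv", "type": ""},
    #         "CsvOption": {"start_line": 2, "import_option": 2,
    #                       "columns": {"total": "2", "rows": ["name", "email"]}},
    #         "Query": {"headers": ["name", "email"], "columns": ["name", "email"]},
    #         "currentPhoneBook": 0,
    #         "filterdUserIds": []
    #     }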

    # -------------------------
    # HTTP GET JSON helper
    # -------------------------
    async def _http_get_json(self, url: str, headers: Dict[str, str], timeout: int) -> Tuple[int, Any, str]:
        """GET JSON helper (returns status, json-or-None, raw-text)."""
        async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
            resp = await client.get(url, headers=headers)
            raw = resp.text
            try:
                j = resp.json()
            except Exception:
                j = None
            return resp.status_code, j, raw

    # -------------------------
    # Fetch CSV columns from server (fallback to baseline)
    # -------------------------
    async def _get_users_csv_columns(self, headers: Dict[str, str], timeout: int = 60) -> List[str]:
        """
        GET /api/users/csv_option → list of columns supported by the server.
        If the server returns nothing or fails, return a safe baseline.
        """
        baseline = [
            "user_id", "name", "department", "user_title", "phone", "email",
            "user_group", "start_datetime", "expiry_datetime", "csn",
            "secure_credential", "access_on_card",
            "mobile_start_datetime", "mobile_expiry_datetime", "csn_mobile", "qr",
            "26 bit SIA Standard-H10301", "HID 37 bit-H10302", "HID 37 bit-H10304",
            "HID Corporate 1000", "HID Corporate 1000 48bit",
            "MIFARE CSN 32bit", "MIFARE CSN 34bit (Parity)",
            "DESFire 56bit", "DESFire 58bit (Parity)",
            "face_image_file1", "face_image_file2", "pin", "tom_aoc", "tom_scc"
        ]

        url = f"{self.session.config.biostar_url}/api/users/csv_option"
        status, j, _raw = await self._http_get_json(url, headers, timeout)

        if status == 200 and isinstance(j, dict):
            rows = ((j.get("CsvOption") or {}).get("columns") or {}).get("rows", [])
            if isinstance(rows, list) and rows:
                return rows

        return baseline
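
    # Illustrative sketch (trimmed, hypothetical values) of the /api/users/csv_option
    # response shape that the helper above unwraps:
    #
    #     {"CsvOption": {"columns": {"total": "3", "rows": ["user_id", "name", "email"]}}}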

    async def import_users_csv_smart(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Smart CSV import (force overwrite, use next_user_id):
        - Create an enhanced CSV that uses the server's full column list as headers
        - Map name/email and inject defaults for user_group/start_datetime/expiry_datetime
        - Get the starting ID via GET /api/users/next_user_id → fill user_id for each row
        - Finally delegate to import_users_csv() (import_option=2 fixed)
        - 🆕 After import, automatically update user groups, access groups, and dates from CSV
        """
        try:
            self.check_auth()

            # ---- Defaults/Options ----
            default_group = str(args.get("default_group_name", "All Users"))
            default_start = str(args.get("default_start_datetime", "2001-01-01 00:00"))
            default_expiry = str(args.get("default_expiry_datetime", "2030-12-31 23:59"))
            start_line = int(args.get("start_line", 2))
            timeout = int(args.get("timeout", 60))
            dry_run = bool(args.get("dry_run", False))
            auto_update = bool(args.get("auto_update", True))  # 🆕 automatic post-import update

            # ---- Load original CSV ----
            loaded = await self._load_csv_bytes_from_args(args, timeout=timeout)
            if "error" in loaded:
                return self.error_response(loaded["error"])
            orig_bytes: bytes = loaded["bytes"]
            orig_basename: str = loaded["basename"] or "upload.csv"

            # ---- CSV parsing ----
            import csv, io, re
            text = orig_bytes.decode(args.get("charset") or "utf-8-sig", errors="replace")
            try:
                sniff_sample = text[:2048]
                dialect = csv.Sniffer().sniff(sniff_sample, delimiters=[",", ";", "|", "\t"])
            except Exception:
                class _D(csv.excel):
                    delimiter = (args.get("delimiter") or ",")
                dialect = _D

            reader = csv.DictReader(io.StringIO(text), dialect=dialect)
            src_headers_exact = [h for h in (reader.fieldnames or []) if h]
            src_rows = list(reader)

            # ---- Fetch full server column list ----
            headers_http = {"bs-session-id": self.get_session_id(), "Content-Type": "application/json"}
            status, j, _raw = await self._http_get_json(
                f"{self.session.config.biostar_url}/api/users/csv_option", headers_http, timeout
            )
            if status == 200 and isinstance(j, dict):
                server_columns: list[str] = ((j.get("CsvOption") or {}).get("columns") or {}).get("rows", []) or []
            else:
                server_columns = await self._get_users_csv_columns(headers_http, timeout)
            if not server_columns:
                return self.error_response("Server did not provide CSV columns and no baseline is available.")

            # ---- Aliases/Mapping ----
            aliases = {
                "name": {"name", "full name", "username"},
                "email": {"email", "e-mail"},
                "user_group": {"user_group", "user group", "group", "group_name", "group name"},
                "start_datetime": {"start_datetime", "start", "start date", "valid_from", "start datetime"},
                "expiry_datetime": {"expiry_datetime", "end", "end_datetime", "valid_to", "expiration_datetime", "end datetime"},
            }
            src_idx = {(h or "").strip().lower(): h for h in src_headers_exact if isinstance(h, str)}

            def pick_src_header(alias_set: set[str]) -> str | None:
                for cand in alias_set:
                    k = str(cand).strip().lower()
                    if k in src_idx:
                        return src_idx[k]
                return None

            def _normalize_dt(s: str) -> str:
                if not isinstance(s, str):
                    return s
                s = s.strip()
                if re.match(r"^\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}$", s):
                    return s + ":00"
                return s

            map_src_for = {
                "name": pick_src_header(aliases["name"]),
                "email": pick_src_header(aliases["email"]),
                "user_group": pick_src_header(aliases["user_group"]),
                "start_datetime": pick_src_header(aliases["start_datetime"]),
                "expiry_datetime": pick_src_header(aliases["expiry_datetime"]),
            }

            # ---- Build enhanced CSV headers/rows ----
            enhanced_headers_exact = list(server_columns)

            def get_ci(row: dict, header_label: str) -> str | None:
                want = (header_label or "").strip().lower()
                for k, v in row.items():
                    if (k or "").strip().lower() == want:
                        return v
                return None

            enhanced_rows: list[dict[str, str]] = []
            for row in src_rows:
                out = {col: "" for col in enhanced_headers_exact}
                if map_src_for["name"]:
                    out["name"] = str(get_ci(row, map_src_for["name"]) or "").strip()
                if map_src_for["email"]:
                    out["email"] = str(get_ci(row, map_src_for["email"]) or "").strip()

                out["user_group"] = (
                    (str(get_ci(row, map_src_for["user_group"]) or "").strip())
                    if map_src_for["user_group"] else ""
                ) or default_group

                sdt = (str(get_ci(row, map_src_for["start_datetime"]) or "").strip()
                       if map_src_for["start_datetime"] else "") or default_start
                edt = (str(get_ci(row, map_src_for["expiry_datetime"]) or "").strip()
                       if map_src_for["expiry_datetime"] else "") or default_expiry
                out["start_datetime"] = _normalize_dt(sdt)
                out["expiry_datetime"] = _normalize_dt(edt)

                enhanced_rows.append(out)

            # ---- Fill user_id using next_user_id ----
            # (Note: assign sequentially for the number of rows. If there is concurrency, the server may reject some; check the error CSV.)
            try:
                async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
                    nid_resp = await client.get(f"{self.session.config.biostar_url}/api/users/next_user_id", headers=headers_http)
                    if nid_resp.status_code != 200:
                        return self.error_response(f"Failed to get next_user_id: {nid_resp.status_code} - {nid_resp.text}")
                    start_id = int((nid_resp.json() or {}).get("User", {}).get("user_id"))
            except Exception as e:
                return self.error_response(f"Failed to parse next_user_id: {e!r}")

            if "user_id" in enhanced_headers_exact:
                for i, r in enumerate(enhanced_rows):
                    r["user_id"] = str(start_id + i)

            # ---- Dry-run ----
            if dry_run:
                return self.success_response({
                    "message": "Dry-run: full-width enhanced CSV prepared with user_id assigned.",
                    "enhanced_filename": f"enh_{orig_basename}",
                    "enhanced_headers": enhanced_headers_exact,
                    "row_count": len(enhanced_rows),
                    "defaults_used": {
                        "user_group": default_group,
                        "start_datetime": default_start,
                        "expiry_datetime": default_expiry,
                    },
                    "next_user_id_start": start_id,
                    "server_columns_total": len(enhanced_headers_exact),
                })

            # ---- Serialize and delegate to import_users_csv() (import_option=2 is fixed there) ----
            import io as _io
            buf = _io.StringIO()
            w = csv.DictWriter(buf, fieldnames=enhanced_headers_exact, dialect=dialect)
            w.writeheader()
            for r in enhanced_rows:
                w.writerow(r)
            enhanced_text = buf.getvalue()
            enhanced_bytes = enhanced_text.encode("utf-8-sig")
            enhanced_name = f"enh_{orig_basename}"

            # import_users_csv writes to the download folder + /download preflight + calls csv_import(import_option=2)
            call_args = {
                "file_bytes": enhanced_bytes,
                "original_file_name": enhanced_name,
                "start_line": start_line,
                "timeout": timeout,
                "columns": enhanced_headers_exact,
                "headers": enhanced_headers_exact,
            }
            import_result = await self.import_users_csv(call_args)

            # 🆕 Auto-update: if the CSV also carries User Group, Access Group, Start Date, or End Date columns, update them automatically
            if auto_update and not dry_run:
                # Check whether the CSV contains any of the extra columns
                has_user_group = any(col.lower() in ["user group", "user_group", "group"] for col in src_headers_exact)
                has_access_group = any(col.lower() in ["access group", "access_group", "출입그룹"] for col in src_headers_exact)
                has_dates = any(col.lower() in ["start date", "start_date", "end date", "end_date", "시작일", "종료일", "만료일"] for col in src_headers_exact)

                if has_user_group or has_access_group or has_dates:
                    logger.info(f"Auto-update triggered: user_group={has_user_group}, access_group={has_access_group}, dates={has_dates}")

                    # Use the original CSV file to call bulk_update_users_from_file
                    try:
                        update_args = {
                            "file_bytes": orig_bytes,
                            "original_file_name": orig_basename,
                            "timeout": timeout,
                        }
                        update_result = await self.bulk_update_users_from_file(update_args)

                        # Combine the import result with the update result
                        combined_result = import_result[0] if import_result else TextContent(type="text", text="")
                        update_text = update_result[0].text if update_result else ""

                        return [TextContent(
                            type="text",
                            text=f"{combined_result.text}\n\n"
                                 f"{'='*60}\n"
                                 f"Auto-update completed (User Groups, Access Groups, Dates)\n"
                                 f"{'='*60}\n\n"
                                 f"{update_text}"
                        )]
                    except Exception as e:
                        logger.warning(f"Auto-update failed: {e}")
                        # The import itself succeeded, so return only its result
                        return import_result

            return import_result

        except Exception as e:
            return await self.handle_api_error(e)
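
    # Illustrative sketch (hypothetical handler instance and filename): a typical
    # import_users_csv_smart call.
    #
    #     await handler.import_users_csv_smart({
    #         "file_path": "new_hires.csv",
    #         "default_group_name": "All Users",
    #         "dry_run": True,      # inspect the enhanced CSV before importing
    #         "auto_update": True,  # apply group/access-group/date columns afterwards
    #     })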

    # ---- helper: CSV attachment upload ----
    async def _upload_csv_attachment(self, filename: str, file_bytes: bytes, timeout: int) -> tuple[int, dict]:
        """
        Upload CSV to /api/attachments to obtain the uri.
        Depending on the server, the key may be 'File' or 'Attachment'; the caller handles both flexibly.
        """
        headers = {"bs-session-id": self.get_session_id()}
        # Use the multipart upload helper if available
        if hasattr(self, "_http_post_multipart"):
            status, j, _raw = await self._http_post_multipart(
                f"{self.session.config.biostar_url}/api/attachments",
                files={"file": (filename, file_bytes, "text/csv")},
                headers=headers,
                timeout=timeout
            )
            return status, (j or {})
        # Otherwise, upload a JSON body (only if the server allows it); failures are handled by the caller
        status, j, _raw = await self._http_post_json(
            f"{self.session.config.biostar_url}/api/attachments",
            {"fileName": filename, "content_base64": base64.b64encode(file_bytes).decode("ascii")},  # base64-encode the raw bytes
            {**headers, "Content-Type": "application/json"},
            timeout
        )
        return status, (j or {})

    # JSON POST
    async def _http_post_json(self, url: str, body: Dict[str, Any], headers: Dict[str, str], timeout: int) -> tuple[int, Any, str]:
        async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
            resp = await client.post(url, headers=headers, json=body)
            raw = resp.text
            try:
                j = resp.json()
            except Exception:
                j = None
            return resp.status_code, j, raw

    # multipart/form-data POST (file upload)
    async def _http_post_multipart(self, url: str, files: Dict[str, tuple], headers: Dict[str, str], timeout: int) -> tuple[int, Any, str]:
        async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
            resp = await client.post(url, headers=headers, files=files)
            raw = resp.text
            try:
                j = resp.json()
            except Exception:
                j = None
            return resp.status_code, j, raw

    async def bulk_update_users_from_file(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Read a CSV-like file and update users:
        - User Group: exclusive 1:1 replacement via PUT /api/users/{id}
        - Access Group: additive only via PUT /api/access_groups/{id} with new_users
        Name resolution rules:
        - 0 matches: return all users as candidates and ask for confirmation
        - >1 matches: return matched candidates and ask for confirmation
        - 1 match: proceed
        Columns (case/spacing tolerant): name, group (user group), access group (can be name or id; multiple allowed by delimiters)
        """
        try:
            # ---------- 0) Auth check ----------
            self.check_auth()

            timeout = int(args.get("timeout", 60))

            # ---------- 1) Load CSV bytes ----------
            loaded = await self._load_csv_bytes_from_args(args, timeout=timeout)
            if "error" in loaded:
                return self.error_response(loaded["error"])
            file_bytes: bytes = loaded["bytes"]
            basename: str = loaded.get("basename") or "upload.csv"

            # ---------- 2) Decode & parse CSV ----------
            import csv, io, re
            text = file_bytes.decode(args.get("charset") or "utf-8-sig", errors="replace")
            try:
                sniff_sample = text[:2048]
                dialect = csv.Sniffer().sniff(sniff_sample, delimiters=[",", ";", "|", "\t"])
            except Exception:
                class _D(csv.excel):
                    delimiter = (args.get("delimiter") or ",")
                dialect = _D

            reader = csv.DictReader(io.StringIO(text), dialect=dialect)
            src_headers_exact = list(reader.fieldnames or [])
            src_rows: list[dict] = [dict(r) for r in reader]

            # Case-insensitive header picking helper
            def _pick(src_headers: list[str], *cands: str) -> Optional[str]:
                norm = {(h or "").strip().lower(): h for h in src_headers if isinstance(h, str)}
                for c in cands:
                    k = (c or "").strip().lower()
                    if k in norm:
                        return norm[k]
                return None

            col_name = _pick(src_headers_exact, "name", "user name", "username", "full name")
            col_ug = _pick(src_headers_exact, "group", "user group", "user_group", "group name", "group_name")
            col_ag = _pick(src_headers_exact, "access group", "access_group", "ag", "access-group", "출입그룹", "출입 그룹")
            col_start = _pick(src_headers_exact, "start date", "start_date", "start datetime", "start_datetime", "시작일", "시작 일자")
            col_end = _pick(src_headers_exact, "end date", "end_date", "expiry date", "expiry_date", "expiry datetime", "expiry_datetime", "종료일", "만료일", "종료 일자", "만료 일자")

            if not col_name:
                return self.error_response(f"Missing required column 'name'. Detected headers: {src_headers_exact}")

            # ---------- 3) Headers & base URL ----------
            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json",
                "Accept": "application/json",
            }
            base = self.session.config.biostar_url

            # ---------- 4) List all users once (for name resolution & defaults) ----------
            # Use v2 users search with minimal sort
            users_body = {"limit": 0, "order_by": "user_id:false"}
            st_users, users_json, users_raw = await self._http_post_json(
                f"{base}/api/v2/users/search", users_body, headers, timeout
            )
            if st_users != 200 or not isinstance(users_json, dict):
                return self.error_response(f"Failed to list users: {st_users}", {"raw": users_raw})

            users_rows = ((users_json.get("UserCollection") or {}).get("rows") or [])

            # Build: name -> list[user], uid -> minimal info for fallback
            name_to_users: dict[str, list[dict]] = {}
            uid_to_min: dict[str, dict] = {}
            for u in users_rows:
                nm = str(u.get("name") or "").strip()
                name_to_users.setdefault(nm.lower(), []).append(u)
                uid = u.get("user_id")
                if uid is not None:
                    uid_str = str(uid)
                    uid_to_min[uid_str] = {
                        "name": u.get("name") or "",
                        "start_datetime": u.get("start_datetime") or "2001-01-01T00:00:00.00Z",
                        "expiry_datetime": u.get("expiry_datetime") or "2030-12-31T23:59:00.00Z",
                    }

            # ---------- 5) List all user groups (FULL) and build name->id map ----------
            # Prefer GET /api/user_groups (stable), then fall back to POST /api/v2/user_groups
            st_ug, ug_json, ug_raw = await self._http_get_json(f"{base}/api/user_groups", headers, timeout)
            if st_ug != 200 or not isinstance(ug_json, dict):
                st_ug, ug_json, ug_raw = await self._http_post_json(
                    f"{base}/api/v2/user_groups", {"limit": 0, "order_by": "id:false"}, headers, timeout
                )
            if st_ug != 200 or not isinstance(ug_json, dict):
                return self.error_response(f"Failed to list user groups: {st_ug}", {"raw": ug_raw})

            ug_rows = ((ug_json.get("UserGroupCollection") or {}).get("rows") or [])
            user_group_name_to_id: dict[str, int] = {}
            # Flatten top-level rows + nested user_groups field
            for g in ug_rows:
                nm = str(g.get("name") or "").strip()
                gid = g.get("id")
                if nm and gid is not None:
                    user_group_name_to_id[nm.lower()] = int(gid)
                for child in (g.get("user_groups") or []):
                    cnm = str(child.get("name") or "").strip()
                    cid = child.get("id")
                    if cnm and cid is not None:
                        user_group_name_to_id[cnm.lower()] = int(cid)

            # ---------- 6) Resolve each row's user by name (collect conflicts) ----------
            provided_resolutions = args.get("name_resolutions") or {}
            conflicts: list[dict] = []
            resolved_rows: list[dict] = []  # [{name, user_id, user_group_name, access_group_tokens[]}]

            # Build a quick 'all users' list used for 0-match suggestion
            all_users_min = [{"user_id": str(u.get("user_id")), "name": u.get("name"), "email": u.get("email")} for u in users_rows]

            for row in src_rows:
                name_val = (str(row.get(col_name) or "").strip())
                if not name_val:
                    continue

                candidates = name_to_users.get(name_val.lower(), [])
                resolved_user_id: Optional[str] = None
                if len(candidates) == 1:
                    resolved_user_id = str(candidates[0].get("user_id"))
                else:
                    chosen = provided_resolutions.get(name_val)  # caller can provide a mapping {name: user_id}
                    if chosen:
                        resolved_user_id = str(chosen)
                    else:
                        if len(candidates) == 0:
                            conflicts.append({
                                "name": name_val,
                                "reason": "no_match",
                                "candidates": all_users_min
                            })
                        else:
                            conflicts.append({
                                "name": name_val,
                                "reason": "multiple_matches",
                                "candidates": [
                                    {
                                        "user_id": str(c.get("user_id")),
                                        "name": c.get("name"),
                                        "email": c.get("email"),
                                        "user_group": (c.get("user_group_id") or {}).get("name"),
                                    } for c in candidates
                                ]
                            })

                if resolved_user_id:
                    # Normalize target user group (if present)
                    target_group_name = str(row.get(col_ug) or "").strip() if col_ug else ""

                    # Normalize access group cell -> tokens (can be delimited or single)
                    ag_tokens: list[str] = []
                    if col_ag:
                        cell = str(row.get(col_ag) or "").strip()
                        if cell:
                            for tok in re.split(r"[;,/|]", cell):
                                t = tok.strip()
                                if t:
                                    ag_tokens.append(t)

                    # Parse date values
                    start_date_val = None
                    end_date_val = None
                    if col_start:
                        start_raw = str(row.get(col_start) or "").strip()
                        if start_raw:
                            start_date_val = parse_flexible_date(start_raw, is_start=True)
                    if col_end:
                        end_raw = str(row.get(col_end) or "").strip()
                        if end_raw:
                            end_date_val = parse_flexible_date(end_raw, is_start=False)

                    resolved_rows.append({
                        "name": name_val,
                        "user_id": resolved_user_id,
                        "user_group_name": target_group_name,
                        "access_group_tokens": ag_tokens,
                        "start_datetime": start_date_val,
                        "expiry_datetime": end_date_val,
                    })

            if conflicts:
                import uuid
                sel_token = args.get("selection_token") or f"sel_{uuid.uuid4().hex}"
                return self.error_response("Some names need confirmation.", {
                    "needs_selection": True,
                    "selection_token": sel_token,
                    "conflicts": conflicts,
                })

            if not resolved_rows:
                return self.error_response("No resolvable rows found (empty or invalid file?)")

            # ---------- 7) Build user-group changes (exclusive 1:1 replace) ----------
            # Note: only when the target group is found in user_group_name_to_id; otherwise record it as unknown
            # We do not send no-op changes (when current group == target)
            uid_to_user = {str(u.get("user_id")): u for u in users_rows}
            user_group_changes: list[dict] = []  # each: {user_id, gid, start_datetime, expiry_datetime}
            unknown_user_groups: set[str] = set()

            # Users that need a date update (dates are updated even when the group does not change)
            user_date_updates: list[dict] = []  # each: {user_id, start_datetime, expiry_datetime}

            for r in resolved_rows:
                gname = (r.get("user_group_name") or "").strip()
                has_group_change = False
                gid = None

                if gname:
                    gid = user_group_name_to_id.get(gname.lower())
                    if gid is None:
                        unknown_user_groups.add(gname)
                    else:
                        cur_u = uid_to_user.get(str(r["user_id"])) or {}
                        cur_gid = ((cur_u.get("user_group_id") or {}).get("id"))
                        try:
                            if cur_gid is None or int(cur_gid) != int(gid):
                                has_group_change = True
                        except Exception:
                            has_group_change = True

                # Check whether the row carries a date change
                has_date_change = r.get("start_datetime") is not None or r.get("expiry_datetime") is not None

                if has_group_change or has_date_change:
                    change_info = {
                        "user_id": str(r["user_id"]),
                        "start_datetime": r.get("start_datetime"),
                        "expiry_datetime": r.get("expiry_datetime"),
                    }
                    if has_group_change:
                        change_info["gid"] = int(gid)
                        user_group_changes.append(change_info)
                    elif has_date_change:
                        user_date_updates.append(change_info)

            # ---------- 8) List access groups and map by name/id ----------
            # Use v2 (no '/search'); if it fails, fall back to the legacy GET
            st_ag, ag_json, ag_raw = await self._http_post_json(
                f"{base}/api/v2/access_groups", {"limit": 0, "order_by": "id:false"}, headers, timeout
            )
            ag_rows = []
            if st_ag == 200 and isinstance(ag_json, dict):
                ag_rows = ((ag_json.get("AccessGroupCollection") or {}).get("rows") or [])
            else:
                st2, j2, raw2 = await self._http_get_json(f"{base}/api/access_groups", headers, timeout)
                if st2 == 200 and isinstance(j2, dict):
                    ag_rows = ((j2.get("AccessGroupCollection") or {}).get("rows") or [])
                else:
                    return self.error_response(f"Failed to list access groups: {st_ag} / fallback {st2}", {"raw": ag_raw})

            ag_name_to_obj = {str(g.get("name") or "").strip().lower(): g for g in ag_rows}
            ag_id_to_obj = {}
            for g in ag_rows:
                gid = g.get("id")
                if gid is not None:
                    try:
                        ag_id_to_obj[int(gid)] = g
                    except Exception:
                        pass

            # Build: {access_group_id(int) -> set(user_id)}
            ag_to_user_ids: dict[int, set[str]] = {}
            for r in resolved_rows:
                for tok in (r.get("access_group_tokens") or []):
                    # Accept either a numeric ID or a name-like token
                    gid_resolved: Optional[int] = None
                    # Try numeric first
                    try:
                        gid_try = int(str(tok))
                        if gid_try in ag_id_to_obj:
                            gid_resolved = gid_try
                    except Exception:
                        gid_resolved = None
                    # Fall back to name
                    if gid_resolved is None:
                        gobj = ag_name_to_obj.get(str(tok).strip().lower())
                        if gobj and gobj.get("id") is not None:
                            try:
                                gid_resolved = int(gobj["id"])
                            except Exception:
                                gid_resolved = None
                    if gid_resolved is not None:
                        ag_to_user_ids.setdefault(gid_resolved, set()).add(str(r["user_id"]))

            # ---------- 9) Dry-run summary ----------
            if bool(args.get("dry_run", False)):
                return self.success_response({
                    "message": "Dry run: no updates were sent.",
                    "file": basename,
                    "resolved_row_count": len(resolved_rows),
                    "user_group_updates": {
                        "total_users": len(user_group_changes),
                        "unknown_user_groups": sorted(list(unknown_user_groups)),
                    },
                    "access_group_updates": {
                        "groups_to_update": len(ag_to_user_ids),
                        "per_group_counts": {str(gid): len(uids) for gid, uids in ag_to_user_ids.items()},
                    },
                })

            # ---------- 10) Execute: per-user PUT /api/users/{id} (exclusive group replace + date updates) ----------
            ug_results: list[dict] = []
            date_results: list[dict] = []

            # Process users whose group changes
            if user_group_changes:
                async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
                    for ch in user_group_changes:
                        uid = ch["user_id"]
                        tgt_gid = ch["gid"]

                        # Hydrate details from the server; fall back to pre-fetched minimal info
                        user_obj = await self._fetch_user_detail(headers, uid)
                        if not user_obj:
                            user_obj = uid_to_min.get(uid, {})  # minimal but enough

                        name_val = user_obj.get("name") or (uid_to_min.get(uid) or {}).get("name") or ""

                        # Use the dates given in the CSV; otherwise keep the existing values
                        start_val = ch.get("start_datetime") or user_obj.get("start_datetime") or (uid_to_min.get(uid) or {}).get("start_datetime") or "2001-01-01T00:00:00.00Z"
                        expiry_val = ch.get("expiry_datetime") or user_obj.get("expiry_datetime") or (uid_to_min.get(uid) or {}).get("expiry_datetime") or "2030-12-31T23:59:00.00Z"

                        payload = {
                            "User": {
                                "name": name_val,
                                "user_group_id": {"id": tgt_gid},  # exclusive replace: server moves the user to this group
                                "start_datetime": start_val,
                                "expiry_datetime": expiry_val,
                            }
                        }
                        resp = await client.put(f"{base}/api/users/{uid}", headers=headers, json=payload)
                        ok = (resp.status_code == 200)
                        try:
                            body = resp.json()
                        except Exception:
                            body = {"raw": resp.text}
                        ug_results.append({
                            "ok": ok, "status": resp.status_code, "response": body,
                            "user_id": uid, "target_group_id": tgt_gid
                        })

            # Process users that only need a date update
            if user_date_updates:
                async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
                    for ch in user_date_updates:
                        uid = ch["user_id"]

                        # Hydrate details from the server
                        user_obj = await self._fetch_user_detail(headers, uid)
                        if not user_obj:
                            user_obj = uid_to_min.get(uid, {})

                        name_val = user_obj.get("name") or (uid_to_min.get(uid) or {}).get("name") or ""
                        cur_gid = None
                        ug = user_obj.get("user_group_id")
                        if isinstance(ug, dict) and ug.get("id") is not None:
                            cur_gid = int(str(ug["id"]))

                        if cur_gid is None:
                            date_results.append({
                                "ok": False, "status": 0, "response": {"error": "No user_group_id found"},
                                "user_id": uid
                            })
                            continue

                        # Use the dates given in the CSV; otherwise keep the existing values
                        start_val = ch.get("start_datetime") or user_obj.get("start_datetime") or (uid_to_min.get(uid) or {}).get("start_datetime") or "2001-01-01T00:00:00.00Z"
                        expiry_val = ch.get("expiry_datetime") or user_obj.get("expiry_datetime") or (uid_to_min.get(uid) or {}).get("expiry_datetime") or "2030-12-31T23:59:00.00Z"

                        payload = {
                            "User": {
                                "name": name_val,
                                "user_group_id": {"id": cur_gid},
                                "start_datetime": start_val,
                                "expiry_datetime": expiry_val,
                            }
                        }
                        resp = await client.put(f"{base}/api/users/{uid}", headers=headers, json=payload)
                        ok = (resp.status_code == 200)
                        try:
                            body = resp.json()
                        except Exception:
                            body = {"raw": resp.text}
                        date_results.append({
                            "ok": ok, "status": resp.status_code, "response": body,
                            "user_id": uid
                        })

            # ---------- 11) Execute: PUT /api/access_groups/{id} with new_users (additive only) ----------
            ag_results: list[dict] = []
            if ag_to_user_ids:
                async with httpx.AsyncClient(verify=False, timeout=timeout) as client:
                    for gid, uids in ag_to_user_ids.items():
                        gobj = ag_id_to_obj.get(gid) or {}
                        body = {
                            "AccessGroup": {
                                "name": gobj.get("name") or f"AG-{gid}",
                                "access_levels": gobj.get("access_levels") or [],
                                "user_groups": gobj.get("user_groups") or [],
                                "new_users": [{"user_id": int(uid)} for uid in sorted(uids)],
                            }
                        }
                        resp = await client.put(f"{base}/api/access_groups/{gid}", headers=headers, json=body)
                        ok = (resp.status_code == 200)
                        try:
                            j = resp.json()
                        except Exception:
                            j = {"raw": resp.text}
                        ag_results.append({
                            "group_id": gid, "ok": ok, "status": resp.status_code, "response": j,
                            "new_users_count": len(uids),
                        })

            # ---------- 12) Return summary ----------
            return self.success_response({
                "message": "Bulk update completed.",
                "file": basename,
                "user_group_updates": ug_results,
                "user_date_only_updates": date_results,
                "access_group_updates": ag_results,
                "unknown_user_groups": sorted(list(unknown_user_groups)),
            })

        except Exception as e:
            return await self.handle_api_error(e)
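
    # Illustrative sketch (hypothetical data): a CSV that bulk_update_users_from_file
    # accepts. 'access group' cells may hold names or numeric IDs; multiple values are
    # split on ; , / or |.
    #
    #     name,group,access group,start date,end date
    #     Kim Minsu,Engineering,Lobby;2,2025-01-01,2026-12-31
    #     Lee Jiwon,Sales,HQ Doors,,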


# ---------- NEW HELPER (place once at module level; do not modify existing helpers) ----------
def _build_full_mapping_arrays(
    server_columns: list[str],
    csv_headers: list[str],
    *,
    alias_map: dict[str, list[str]],
    forbid_headers_lower: set[str],
) -> tuple[list[str], list[str]]:
    """
    Build full Query.columns/Query.headers arrays:
    - columns: exact server-case list from /api/users/csv_option (as given)
    - headers: same length; for each server column, put the CSV header name to map, or "" to skip
    - forbid headers: any CSV header whose lower() is in forbid_headers_lower will NOT be mapped
    - alias_map: keys are server column lower() -> list of candidate CSV header names (lower-case)
    """
    csv_idx = {str(h).strip().lower(): str(h).strip() for h in csv_headers if isinstance(h, str) and str(h).strip()}

    full_columns: list[str] = list(server_columns)
    full_headers: list[str] = []

    for server_col in server_columns:
        sc_lower = str(server_col).strip().lower()

        # Never map user-defined IDs or forbidden headers
        if sc_lower in forbid_headers_lower:
            full_headers.append("")  # skip
            continue

        mapped_header = ""
        # direct match
        if sc_lower in csv_idx:
            mapped_header = csv_idx[sc_lower]
        else:
            # alias match
            for cand in alias_map.get(sc_lower, []):
                lc = str(cand).strip().lower()
                if lc in csv_idx and lc not in forbid_headers_lower:
                    mapped_header = csv_idx[lc]
                    break

        full_headers.append(mapped_header or "")

    return full_columns, full_headers