suprema-biostar-mcp 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biostar_x_mcp_server/__init__.py +25 -0
- biostar_x_mcp_server/__main__.py +15 -0
- biostar_x_mcp_server/config.py +87 -0
- biostar_x_mcp_server/handlers/__init__.py +35 -0
- biostar_x_mcp_server/handlers/access_handler.py +2162 -0
- biostar_x_mcp_server/handlers/audit_handler.py +489 -0
- biostar_x_mcp_server/handlers/auth_handler.py +216 -0
- biostar_x_mcp_server/handlers/base_handler.py +228 -0
- biostar_x_mcp_server/handlers/card_handler.py +746 -0
- biostar_x_mcp_server/handlers/device_handler.py +4344 -0
- biostar_x_mcp_server/handlers/door_handler.py +3969 -0
- biostar_x_mcp_server/handlers/event_handler.py +1331 -0
- biostar_x_mcp_server/handlers/file_handler.py +212 -0
- biostar_x_mcp_server/handlers/help_web_handler.py +379 -0
- biostar_x_mcp_server/handlers/log_handler.py +1051 -0
- biostar_x_mcp_server/handlers/navigation_handler.py +109 -0
- biostar_x_mcp_server/handlers/occupancy_handler.py +541 -0
- biostar_x_mcp_server/handlers/user_handler.py +3568 -0
- biostar_x_mcp_server/schemas/__init__.py +21 -0
- biostar_x_mcp_server/schemas/access.py +158 -0
- biostar_x_mcp_server/schemas/audit.py +73 -0
- biostar_x_mcp_server/schemas/auth.py +24 -0
- biostar_x_mcp_server/schemas/cards.py +128 -0
- biostar_x_mcp_server/schemas/devices.py +496 -0
- biostar_x_mcp_server/schemas/doors.py +306 -0
- biostar_x_mcp_server/schemas/events.py +104 -0
- biostar_x_mcp_server/schemas/files.py +7 -0
- biostar_x_mcp_server/schemas/help.py +29 -0
- biostar_x_mcp_server/schemas/logs.py +33 -0
- biostar_x_mcp_server/schemas/occupancy.py +19 -0
- biostar_x_mcp_server/schemas/tool_response.py +29 -0
- biostar_x_mcp_server/schemas/users.py +166 -0
- biostar_x_mcp_server/server.py +335 -0
- biostar_x_mcp_server/session.py +221 -0
- biostar_x_mcp_server/tool_manager.py +172 -0
- biostar_x_mcp_server/tools/__init__.py +45 -0
- biostar_x_mcp_server/tools/access.py +510 -0
- biostar_x_mcp_server/tools/audit.py +227 -0
- biostar_x_mcp_server/tools/auth.py +59 -0
- biostar_x_mcp_server/tools/cards.py +269 -0
- biostar_x_mcp_server/tools/categories.py +197 -0
- biostar_x_mcp_server/tools/devices.py +1552 -0
- biostar_x_mcp_server/tools/doors.py +865 -0
- biostar_x_mcp_server/tools/events.py +305 -0
- biostar_x_mcp_server/tools/files.py +28 -0
- biostar_x_mcp_server/tools/help.py +80 -0
- biostar_x_mcp_server/tools/logs.py +123 -0
- biostar_x_mcp_server/tools/navigation.py +89 -0
- biostar_x_mcp_server/tools/occupancy.py +91 -0
- biostar_x_mcp_server/tools/users.py +1113 -0
- biostar_x_mcp_server/utils/__init__.py +31 -0
- biostar_x_mcp_server/utils/category_mapper.py +206 -0
- biostar_x_mcp_server/utils/decorators.py +101 -0
- biostar_x_mcp_server/utils/language_detector.py +51 -0
- biostar_x_mcp_server/utils/search.py +42 -0
- biostar_x_mcp_server/utils/timezone.py +122 -0
- suprema_biostar_mcp-1.0.1.dist-info/METADATA +163 -0
- suprema_biostar_mcp-1.0.1.dist-info/RECORD +61 -0
- suprema_biostar_mcp-1.0.1.dist-info/WHEEL +4 -0
- suprema_biostar_mcp-1.0.1.dist-info/entry_points.txt +2 -0
- suprema_biostar_mcp-1.0.1.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,489 @@

import asyncio  # required by the asyncio.sleep() wait loop in audit_csv_export
import logging
import csv
import io
import os
import platform
import shutil
import re
import uuid
from pathlib import Path
from typing import Sequence, Dict, Any
from mcp.types import TextContent
import httpx
from .base_handler import BaseHandler

logger = logging.getLogger(__name__)


class AuditHandler(BaseHandler):
    """Handler for audit-related operations."""

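    # Numeric operator codes used in BioStar X search conditions throughout
    # this handler: 0 = EQUAL, 2 = CONTAINS, 3 = BETWEEN, 5 = GREATER, 6 = LESS.
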
    async def audit_search(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search audit logs with various filters."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            # Build search query
            search_query = {
                "Query": {
                    "limit": args.get("limit", 100),
                    "offset": args.get("offset", 0),
                    "conditions": []
                }
            }

            # Add filters
            if args.get("start_datetime"):
                search_query["Query"]["conditions"].append({
                    "column": "datetime",
                    "operator": 5,  # GREATER
                    "values": [args["start_datetime"]]
                })

            if args.get("end_datetime"):
                search_query["Query"]["conditions"].append({
                    "column": "datetime",
                    "operator": 6,  # LESS
                    "values": [args["end_datetime"]]
                })

            if args.get("operator_name"):
                search_query["Query"]["conditions"].append({
                    "column": "operator_id.name",
                    "operator": 2,  # CONTAINS
                    "values": [args["operator_name"]]
                })

            if args.get("target_type"):
                search_query["Query"]["conditions"].append({
                    "column": "target_type",
                    "operator": 0,  # EQUAL
                    "values": [args["target_type"]]
                })

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/search",
                    headers=headers,
                    json=search_query
                )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json()
            audit_logs = data.get("AuditCollection", {}).get("rows", [])
            total = data.get("AuditCollection", {}).get("total", 0)

            return self.success_response({
                "message": f"Found {len(audit_logs)} audit logs",
                "total": total,
                "count": len(audit_logs),
                "audit_logs": audit_logs
            })

        except Exception as e:
            return await self.handle_api_error(e)

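    # Example of the payload audit_search builds (argument values here are
    # hypothetical): {"limit": 50, "start_datetime": "2024-01-01T00:00:00Z",
    # "operator_name": "admin"} yields
    #   {"Query": {"limit": 50, "offset": 0, "conditions": [
    #       {"column": "datetime", "operator": 5, "values": ["2024-01-01T00:00:00Z"]},
    #       {"column": "operator_id.name", "operator": 2, "values": ["admin"]}]}}
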
    async def audit_search_user(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search audit logs for specific user operations."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            user_name = args.get("user_name")
            if not user_name:
                return self.error_response("user_name parameter is required")

            # Build search query
            search_query = {
                "Query": {
                    "limit": args.get("limit", 100),
                    "offset": args.get("offset", 0),
                    "conditions": [
                        {
                            "column": "target_id.name",
                            "operator": 2,  # CONTAINS
                            "values": [user_name]
                        }
                    ]
                }
            }

            # Add time range if provided
            if args.get("start_datetime"):
                search_query["Query"]["conditions"].append({
                    "column": "datetime",
                    "operator": 5,  # GREATER
                    "values": [args["start_datetime"]]
                })

            if args.get("end_datetime"):
                search_query["Query"]["conditions"].append({
                    "column": "datetime",
                    "operator": 6,  # LESS
                    "values": [args["end_datetime"]]
                })

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/search",
                    headers=headers,
                    json=search_query
                )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json()
            audit_logs = data.get("AuditCollection", {}).get("rows", [])

            return self.success_response({
                "message": f"Found {len(audit_logs)} audit logs for user '{user_name}'",
                "count": len(audit_logs),
                "audit_logs": audit_logs
            })

        except Exception as e:
            return await self.handle_api_error(e)

    async def audit_search_operator_level(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search audit logs by operator access level."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            operator_level = args.get("operator_level")
            if not operator_level:
                return self.error_response("operator_level parameter is required")

            # Build search query
            search_query = {
                "Query": {
                    "limit": args.get("limit", 100),
                    "offset": args.get("offset", 0),
                    "conditions": [
                        {
                            "column": "operator_id.access_level_id.name",
                            "operator": 0,  # EQUAL
                            "values": [operator_level]
                        }
                    ]
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/search",
                    headers=headers,
                    json=search_query
                )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json()
            audit_logs = data.get("AuditCollection", {}).get("rows", [])

            return self.success_response({
                "message": f"Found {len(audit_logs)} audit logs for operator level '{operator_level}'",
                "count": len(audit_logs),
                "audit_logs": audit_logs
            })

        except Exception as e:
            return await self.handle_api_error(e)

    async def audit_search_ip_list(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search audit logs by IP address."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            ip_address = args.get("ip_address")
            if not ip_address:
                return self.error_response("ip_address parameter is required")

            # Build search query
            search_query = {
                "Query": {
                    "limit": args.get("limit", 100),
                    "offset": args.get("offset", 0),
                    "conditions": [
                        {
                            "column": "ip_address",
                            "operator": 0,  # EQUAL
                            "values": [ip_address]
                        }
                    ]
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/search",
                    headers=headers,
                    json=search_query
                )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json()
            audit_logs = data.get("AuditCollection", {}).get("rows", [])

            return self.success_response({
                "message": f"Found {len(audit_logs)} audit logs for IP '{ip_address}'",
                "count": len(audit_logs),
                "audit_logs": audit_logs
            })

        except Exception as e:
            return await self.handle_api_error(e)

    async def audit_search_target_list(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """Search audit logs by target type."""
        try:
            self.check_auth()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            target_type = args.get("target_type")
            if not target_type:
                return self.error_response("target_type parameter is required")

            # Build search query
            search_query = {
                "Query": {
                    "limit": args.get("limit", 100),
                    "offset": args.get("offset", 0),
                    "conditions": [
                        {
                            "column": "target_type",
                            "operator": 0,  # EQUAL
                            "values": [target_type]
                        }
                    ]
                }
            }

            async with httpx.AsyncClient(verify=False) as client:
                response = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/search",
                    headers=headers,
                    json=search_query
                )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json()
            audit_logs = data.get("AuditCollection", {}).get("rows", [])

            return self.success_response({
                "message": f"Found {len(audit_logs)} audit logs for target type '{target_type}'",
                "count": len(audit_logs),
                "audit_logs": audit_logs
            })

        except Exception as e:
            return await self.handle_api_error(e)

    async def audit_csv_export(self, args: Dict[str, Any]) -> Sequence[TextContent]:
        """
        Export audit logs to CSV via POST /api/audit/export and copy to Windows Downloads.
        - Uses query param: time_offset (minutes)
        - Payload shape:
          {
            "Query": {
              "offset": <int>,
              "conditions": [...],  # e.g. [{"column":"DATE","operator":3,"values":[start,end]}]
              "columns": ["DATE","USRID","PERM","IP","MENU","TARGET","METHOD","CONTENT"],
              "headers": ["Datetime","User","Operator Level","IP","Category","Target","Action","Modification"]
            }
          }
        - After export, reads file from Nginx download dir and copies to user's Downloads.
        """
        try:
            self.check_auth()

            # ----- copy options -----
            copy_to_downloads: bool = bool(args.get("copy_to_downloads", True))
            dest_dir_arg: str = (args.get("dest_dir") or "").strip()
            target_username: str = (args.get("target_username") or "").strip()

            headers = {
                "bs-session-id": self.get_session_id(),
                "Content-Type": "application/json"
            }

            # ----- columns / headers (force UPPER columns per API spec) -----
            columns = args.get("columns") or ["DATE","USRID","PERM","IP","MENU","TARGET","METHOD","CONTENT"]
            columns = [str(c).upper() for c in columns]

            headers_csv = args.get("headers") or [
                "Datetime","User","Operator Level","IP","Category","Target","Action","Modification"
            ]
            if len(headers_csv) != len(columns):
                return self.error_response("Length of 'headers' must match 'columns'.")

            # ----- conditions -----
            conditions = args.get("conditions") or []
            # convenience: support start_datetime / end_datetime -> build DATE condition
            start_iso = args.get("start_datetime")
            end_iso = args.get("end_datetime")
            if not conditions and (start_iso or end_iso):
                if start_iso and end_iso:
                    conditions = [{"column": "DATE", "operator": 3, "values": [start_iso, end_iso]}]  # BETWEEN
                elif start_iso:
                    conditions = [{"column": "DATE", "operator": 5, "values": [start_iso]}]  # GREATER
                else:
                    conditions = [{"column": "DATE", "operator": 6, "values": [end_iso]}]  # LESS

            offset = int(args.get("offset", 0))
            time_offset = int(args.get("time_offset_minutes", args.get("time_offset", 0)))

            payload = {
                "Query": {
                    "offset": offset,
                    "conditions": conditions,
                    "columns": columns,
                    "headers": headers_csv
                }
            }

            # ----- call /api/audit/export (NOT /api/audit/search) -----
            async with httpx.AsyncClient(verify=False) as client:
                resp = await client.post(
                    f"{self.session.config.biostar_url}/api/audit/export",
                    headers=headers,
                    params={"time_offset": time_offset},
                    json=payload
                )

            if resp.status_code != 200:
                return self.error_response(f"Audit export failed: {resp.status_code} - {resp.text}")

            body = resp.json() or {}
            filename = (body.get("File") or {}).get("uri") or body.get("filename")
            if not filename:
                return self.error_response("Export succeeded but no filename returned", {"response": body})

            # ----- source path in Nginx download dir -----
            download_root = getattr(self.session.config, "download_dir", r"C:\Program Files\BioStar X\nginx\html\download")
            src_path = Path(download_root) / filename

            # Some systems need a short delay until the file appears
            for _ in range(30):
                if src_path.exists():
                    break
                await asyncio.sleep(0.1)

            # ----- copy to Windows Downloads -----
            copy_status = "skipped"
            copied_to = ""
            if copy_to_downloads:
                if platform.system().lower() == "windows":
                    if dest_dir_arg:
                        dest_dir = Path(dest_dir_arg)
                    elif target_username:
                        dest_dir = Path(f"C:\\Users\\{target_username}\\Downloads")
                    else:
                        userprofile = os.environ.get("USERPROFILE") or str(Path.home())
                        dest_dir = Path(userprofile) / "Downloads"

                    try:
                        dest_dir.mkdir(parents=True, exist_ok=True)
                        if not src_path.exists():
                            copy_status = "failed: source not found"
                        else:
                            dest_path = dest_dir / filename
                            shutil.copy2(src_path, dest_path)
                            copied_to = str(dest_path)
                            copy_status = "success"
                    except Exception as ce:
                        copy_status = f"failed: {ce}"
                else:
                    copy_status = "skipped: non-windows"

            return self.success_response({
                "message": "Audit CSV exported successfully",
                "filename": filename,
                "download_url": f"/download/{filename}",
                "source_path": str(src_path),
                "copy": {"enabled": copy_to_downloads, "status": copy_status, "destination": copied_to},
                "export_params": {"time_offset": time_offset, "offset": offset, "columns": columns, "headers": headers_csv, "conditions": conditions}
            })

        except Exception as e:
            return await self.handle_api_error(e)

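    # Example call to audit_csv_export (argument values here are hypothetical):
    #   {"start_datetime": "2024-01-01 00:00:00", "end_datetime": "2024-01-31 23:59:59",
    #    "time_offset_minutes": 540}
    # builds one BETWEEN condition on "DATE", POSTs to
    # /api/audit/export?time_offset=540 with the default columns/headers, and
    # copies the exported CSV into the caller's Downloads folder on Windows.
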
    def _generate_audit_csv(self, audit_logs: list, columns: list, headers: list) -> str:
        r"""
        Build CSV text from audit logs using logical column tokens.

        Supported column tokens -> log field mapping:
        - "DATE"   -> log["datetime"]
        - "USRID"  -> log["operator_id"]["name"]
        - "PERM"   -> log["operator_id"]["access_level_id"]["name"]
        - "IP"     -> log["ip_address"]
        - "MENU"   -> log["menu"] or log["category"] (fallback empty)
        - "TARGET" -> log["target_id"]["name"]
        - "METHOD" -> log["action"]
        - "CONTENT"-> log["details"] or log["result"]

        If a field is missing, use empty string.
        """
        # header line
        out_lines = [",".join(f'"{h}"' for h in headers)]

        def get_value(token: str, log: Dict[str, Any]) -> str:
            t = (token or "").upper()
            try:
                if t == "DATE":
                    return str(log.get("datetime", ""))
                if t == "USRID":
                    return str((log.get("operator_id") or {}).get("name", ""))
                if t == "PERM":
                    return str(((log.get("operator_id") or {}).get("access_level_id") or {}).get("name", ""))
                if t == "IP":
                    return str(log.get("ip_address", ""))
                if t == "MENU":
                    return str(log.get("menu") or log.get("category") or "")
                if t == "TARGET":
                    return str((log.get("target_id") or {}).get("name", ""))
                if t == "METHOD":
                    return str(log.get("action", ""))
                if t == "CONTENT":
                    return str(log.get("details") or log.get("result") or "")
            except Exception:
                return ""
            return ""

        for log in audit_logs or []:
            row_vals = [get_value(tok, log) for tok in columns]
            # CSV escaping (wrap in quotes, replace inner quotes)
            escaped = ['"' + str(v).replace('"', '""') + '"' for v in row_vals]
            out_lines.append(",".join(escaped))

        return "\n".join(out_lines)
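
For reference, the request shape these handlers issue can be reproduced outside the server. A minimal sketch, assuming a reachable BioStar X deployment; the URL, session id, and condition value below are placeholders, not values shipped in the package:

import asyncio
import httpx

async def search_audit_logs() -> None:
    # Placeholder endpoint and session id; a real "bs-session-id" comes from
    # the BioStar X login API.
    url = "https://biostar.example.com/api/audit/search"
    headers = {"bs-session-id": "<session-id>", "Content-Type": "application/json"}
    payload = {
        "Query": {
            "limit": 100,
            "offset": 0,
            "conditions": [
                # 0 = EQUAL, per the operator codes used in audit_handler.py
                {"column": "ip_address", "operator": 0, "values": ["192.0.2.10"]},
            ],
        }
    }
    # verify=False mirrors the handlers above; BioStar servers commonly run
    # with self-signed certificates.
    async with httpx.AsyncClient(verify=False) as client:
        resp = await client.post(url, headers=headers, json=payload)
        resp.raise_for_status()
        rows = resp.json().get("AuditCollection", {}).get("rows", [])
        print(f"Found {len(rows)} audit logs")

asyncio.run(search_audit_logs())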