suprema-biostar-mcp 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biostar_x_mcp_server/__init__.py +25 -0
- biostar_x_mcp_server/__main__.py +15 -0
- biostar_x_mcp_server/config.py +87 -0
- biostar_x_mcp_server/handlers/__init__.py +35 -0
- biostar_x_mcp_server/handlers/access_handler.py +2162 -0
- biostar_x_mcp_server/handlers/audit_handler.py +489 -0
- biostar_x_mcp_server/handlers/auth_handler.py +216 -0
- biostar_x_mcp_server/handlers/base_handler.py +228 -0
- biostar_x_mcp_server/handlers/card_handler.py +746 -0
- biostar_x_mcp_server/handlers/device_handler.py +4344 -0
- biostar_x_mcp_server/handlers/door_handler.py +3969 -0
- biostar_x_mcp_server/handlers/event_handler.py +1331 -0
- biostar_x_mcp_server/handlers/file_handler.py +212 -0
- biostar_x_mcp_server/handlers/help_web_handler.py +379 -0
- biostar_x_mcp_server/handlers/log_handler.py +1051 -0
- biostar_x_mcp_server/handlers/navigation_handler.py +109 -0
- biostar_x_mcp_server/handlers/occupancy_handler.py +541 -0
- biostar_x_mcp_server/handlers/user_handler.py +3568 -0
- biostar_x_mcp_server/schemas/__init__.py +21 -0
- biostar_x_mcp_server/schemas/access.py +158 -0
- biostar_x_mcp_server/schemas/audit.py +73 -0
- biostar_x_mcp_server/schemas/auth.py +24 -0
- biostar_x_mcp_server/schemas/cards.py +128 -0
- biostar_x_mcp_server/schemas/devices.py +496 -0
- biostar_x_mcp_server/schemas/doors.py +306 -0
- biostar_x_mcp_server/schemas/events.py +104 -0
- biostar_x_mcp_server/schemas/files.py +7 -0
- biostar_x_mcp_server/schemas/help.py +29 -0
- biostar_x_mcp_server/schemas/logs.py +33 -0
- biostar_x_mcp_server/schemas/occupancy.py +19 -0
- biostar_x_mcp_server/schemas/tool_response.py +29 -0
- biostar_x_mcp_server/schemas/users.py +166 -0
- biostar_x_mcp_server/server.py +335 -0
- biostar_x_mcp_server/session.py +221 -0
- biostar_x_mcp_server/tool_manager.py +172 -0
- biostar_x_mcp_server/tools/__init__.py +45 -0
- biostar_x_mcp_server/tools/access.py +510 -0
- biostar_x_mcp_server/tools/audit.py +227 -0
- biostar_x_mcp_server/tools/auth.py +59 -0
- biostar_x_mcp_server/tools/cards.py +269 -0
- biostar_x_mcp_server/tools/categories.py +197 -0
- biostar_x_mcp_server/tools/devices.py +1552 -0
- biostar_x_mcp_server/tools/doors.py +865 -0
- biostar_x_mcp_server/tools/events.py +305 -0
- biostar_x_mcp_server/tools/files.py +28 -0
- biostar_x_mcp_server/tools/help.py +80 -0
- biostar_x_mcp_server/tools/logs.py +123 -0
- biostar_x_mcp_server/tools/navigation.py +89 -0
- biostar_x_mcp_server/tools/occupancy.py +91 -0
- biostar_x_mcp_server/tools/users.py +1113 -0
- biostar_x_mcp_server/utils/__init__.py +31 -0
- biostar_x_mcp_server/utils/category_mapper.py +206 -0
- biostar_x_mcp_server/utils/decorators.py +101 -0
- biostar_x_mcp_server/utils/language_detector.py +51 -0
- biostar_x_mcp_server/utils/search.py +42 -0
- biostar_x_mcp_server/utils/timezone.py +122 -0
- suprema_biostar_mcp-1.0.1.dist-info/METADATA +163 -0
- suprema_biostar_mcp-1.0.1.dist-info/RECORD +61 -0
- suprema_biostar_mcp-1.0.1.dist-info/WHEEL +4 -0
- suprema_biostar_mcp-1.0.1.dist-info/entry_points.txt +2 -0
- suprema_biostar_mcp-1.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,1331 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import json
|
|
3
|
+
import csv
|
|
4
|
+
import io
|
|
5
|
+
import asyncio
|
|
6
|
+
import platform
|
|
7
|
+
from datetime import datetime, timedelta
|
|
8
|
+
from typing import Sequence, Dict, Any, Optional, List
|
|
9
|
+
from mcp.types import TextContent
|
|
10
|
+
import httpx
|
|
11
|
+
from pydantic import ValidationError
|
|
12
|
+
from pathlib import Path
|
|
13
|
+
import os
|
|
14
|
+
import shutil
|
|
15
|
+
|
|
16
|
+
from .base_handler import BaseHandler
|
|
17
|
+
from ..utils import get_timezone_offset, convert_utc_to_local
|
|
18
|
+
|
|
19
|
+
logger = logging.getLogger(__name__)
|
|
20
|
+
|
|
21
|
+
# Alias for backward compatibility: older call sites used this name before
# the helper was consolidated into utils as convert_utc_to_local.
convert_utc_to_server_time = convert_utc_to_local
|
|
23
|
+
|
|
24
|
+
# BioStar X Event Type Mapping — maps numeric event type IDs (as strings)
# to human-readable event names for display. Unknown IDs are handled by the
# caller (_map_event_type_id_to_name) with a fallback label; the "etc"
# sentinel entry maps to itself.
EVENT_TYPE_NAMES = {
    "3100": "Video recording space shortage",
    "3110": "Video recording space shortage",
    "3111": "Add NVR successful",
    "3113": "Add NVR",
    "3131": "NVR update successful",
    "3132": "NVR update failed",
    "3133": "Update NVR",
    "3141": "Remove NVR successful",
    "3142": "Remove NVR failed",
    "3143": "Remove NVR",
    "3211": "Add camera successful",
    "3212": "Add camera failed",
    "3213": "Add camera",
    "3231": "Update camera successful",
    "3232": "Update camera failed",
    "3233": "Update camera",
    "3241": "Remove camera successful",
    "3242": "Remove camera failed",
    "3243": "Remove camera",
    "3351": "Recording completed",
    "3352": "Recording failed",
    "3353": "Start recording",
    "3354": "Recording failed - End of Retry",
    "3355": "Recording - NVR connect failed",
    "3356": "Recording - NVR Busy",
    "3357": "Recording - No Data in NVR",
    "3461": "Snapshot completed",
    "3462": "Snapshot failed",
    "3463": "Start snapshot",
    "3464": "Snapshot failed - End of Retry",
    "3465": "Snapshot - NVR connect failed",
    "3466": "Snapshot - NVR Busy",
    "3467": "Snapshot - No Data in NVR",
    "4001": "Exceeded maximum login attempts",
    "4002": "BioStar X Core Service Abnormal Status",
    "4003": "BioStar X Core Service Normal Status Recovered",
    "4004": "BioStar X Device Service Abnormal Status",
    "4005": "BioStar X Device Service Normal Status Recovered",
    "4016": "VMS connected",
    "4017": "VMS disconnected",
    "4018": "Tailgating Detected (AI)",
    "4019": "Loitering Detected (AI)",
    "4020": "Intrusion Detected (AI)",
    "4021": "Fall Detected (AI)",
    "4022": "Tailgating Alarm Cleared (AI)",
    "4023": "Loitering Alarm Cleared (AI)",
    "4024": "Intrusion Alarm Cleared (AI)",
    "4025": "Fall Alarm Cleared (AI)",
    "4026": "Abandon Detected (AI)",
    "4027": "Abandon Alarm Cleared (AI)",
    "4088": "Access granted",
    "4089": "Roll Call Started",
    "4090": "Roll Call Ended",
    "4091": "User Sync Failed (No Compatible Face Template)",
    "4094": "Firmware Upgrade Required (Log Upload Failed Due to Device Database Corruption)",
    "4095": "Device Disconnection Detected",
    # 4096–4145: 1:1 authentication success, by credential combination.
    "4096": "1:1 authentication succeeded",
    "4097": "1:1 authentication succeeded (ID + PIN)",
    "4098": "1:1 authentication succeeded (ID + Fingerprint)",
    "4099": "1:1 authentication succeeded (ID + Fingerprint + PIN)",
    "4100": "1:1 authentication succeeded (ID + Face)",
    "4101": "1:1 authentication succeeded (ID + Face + PIN)",
    "4102": "1:1 authentication succeeded (Card)",
    "4103": "1:1 authentication succeeded (Card + PIN)",
    "4104": "1:1 authentication succeeded (Card + Fingerprint)",
    "4105": "1:1 authentication succeeded (Card + Fingerprint + PIN)",
    "4106": "1:1 authentication succeeded (Card + Face)",
    "4107": "1:1 authentication succeeded (Card + Face + PIN)",
    "4108": "1:1 authentication succeeded (Access-on-card)",
    "4109": "1:1 authentication succeeded (Access-on-card + PIN)",
    "4110": "1:1 authentication succeeded (Access-on-card + Fingerprint)",
    "4111": "1:1 authentication succeeded (Access-on-card + Fingerprint + PIN)",
    "4112": "1:1 authentication succeeded (Card + Face + Fingerprint)",
    "4113": "1:1 authentication succeeded (Card + Fingerprint + Face)",
    "4114": "1:1 authentication succeeded (ID + Face + Fingerprint)",
    "4115": "1:1 authentication succeeded (ID + Fingerprint + Face)",
    "4118": "1:1 authentication succeeded (Mobile Card)",
    "4119": "1:1 authentication succeeded (Mobile Card + PIN)",
    "4120": "1:1 authentication succeeded (Mobile Card + Fingerprint)",
    "4121": "1:1 authentication succeeded (Mobile Card + Fingerprint + PIN)",
    "4122": "1:1 authentication succeeded (Mobile Card + Face)",
    "4123": "1:1 authentication succeeded (Mobile Card + Face + PIN)",
    "4128": "1:1 authentication succeeded (Mobile Card + Face + Fingerprint)",
    "4129": "1:1 authentication succeeded (Mobile Card + Fingerprint + Face)",
    "4133": "1:1 authentication succeeded (QR/Barcode)",
    "4134": "1:1 authentication succeeded (QR/Barcode + PIN)",
    "4135": "1:1 authentication succeeded (QR/Barcode + Fingerprint)",
    "4136": "1:1 authentication succeeded (QR/Barcode + Fingerprint + PIN)",
    "4137": "1:1 authentication succeeded (QR/Barcode + Face)",
    "4138": "1:1 authentication succeeded (QR/Barcode + Face + PIN)",
    "4139": "1:1 authentication succeeded (QR/Barcode + Face + Fingerprint)",
    "4140": "1:1 authentication succeeded (QR/Barcode + Fingerprint + Face)",
    "4145": "1:1 authentication succeeded (Lock Override)",
    # 4352–4364: 1:1 authentication failure reasons.
    "4352": "1:1 authentication failed",
    "4353": "1:1 authentication failed (ID)",
    "4354": "1:1 authentication failed (Card)",
    "4355": "1:1 authentication failed (PIN)",
    "4356": "1:1 authentication failed (Fingerprint)",
    "4357": "1:1 authentication failed (Face)",
    "4358": "1:1 authentication failed (Access-on-card + PIN)",
    "4359": "1:1 authentication failed (Access-on-card + Fingerprint)",
    "4360": "1:1 authentication failed (Mobile Card)",
    "4361": "1:1 authentication failed (Non-numeric Data)",
    "4362": "1:1 authentication failed (Unsupported character)",
    "4363": "1:1 authentication failed (Exceeded QR max size)",
    "4364": "1:1 authentication failed (QR/Barcode)",
    # 4608–4652: 1:1 duress authentication success, by credential combination.
    "4608": "1:1 duress authentication succeeded",
    "4609": "1:1 duress authentication succeeded (ID + PIN)",
    "4610": "1:1 duress authentication succeeded (ID + Fingerprint)",
    "4611": "1:1 duress authentication succeeded (ID + Fingerprint + PIN)",
    "4612": "1:1 duress authentication succeeded (ID + Face)",
    "4613": "1:1 duress authentication succeeded (ID + Face + PIN)",
    "4614": "1:1 duress authentication succeeded (Card)",
    "4615": "1:1 duress authentication succeeded (Card + PIN)",
    "4616": "1:1 duress authentication succeeded (Card + Fingerprint)",
    "4617": "1:1 duress authentication succeeded (Card + Fingerprint + PIN)",
    "4618": "1:1 duress authentication succeeded (Card + Face)",
    "4619": "1:1 duress authentication succeeded (Card + Face + PIN)",
    "4620": "1:1 duress authentication succeeded (Access-on-card)",
    "4621": "1:1 duress authentication succeeded (Access-on-card + PIN)",
    "4622": "1:1 duress authentication succeeded (Access-on-card + Fingerprint)",
    "4623": "1:1 duress authentication succeeded (Access-on-card + Fingerprint + PIN)",
    "4624": "1:1 duress authentication succeeded (Card + Face + Fingerprint)",
    "4625": "1:1 duress authentication succeeded (Card + Fingerprint + Face)",
    "4626": "1:1 duress authentication succeeded (ID + Face + Fingerprint)",
    "4627": "1:1 duress authentication succeeded (ID + Fingerprint + Face)",
    "4632": "1:1 duress authentication succeeded (Mobile Card + Fingerprint)",
    "4633": "1:1 duress authentication succeeded (Mobile Card + Fingerprint + PIN)",
    "4640": "1:1 duress authentication succeeded (Mobile Card + Face + Fingerprint)",
    "4641": "1:1 duress authentication succeeded (Mobile Card + Fingerprint + Face)",
    "4647": "1:1 duress authentication succeeded (QR/Barcode + Fingerprint)",
    "4648": "1:1 duress authentication succeeded (QR/Barcode + Fingerprint + PIN)",
    "4651": "1:1 duress authentication succeeded (QR/Barcode + Face + Fingerprint)",
    "4652": "1:1 duress authentication succeeded (QR/Barcode + Fingerprint + Face)",
    # 4864–4872: 1:N authentication success.
    "4864": "1:N authentication succeeded",
    "4865": "1:N authentication succeeded (Fingerprint)",
    "4866": "1:N authentication succeeded (Fingerprint + PIN)",
    "4867": "1:N authentication succeeded (Face)",
    "4868": "1:N authentication succeeded (Face + PIN)",
    "4869": "1:N authentication succeeded (Face + Fingerprint)",
    "4870": "1:N authentication succeeded (Face + Fingerprint + PIN)",
    "4871": "1:N authentication succeeded (Fingerprint + Face)",
    "4872": "1:N authentication succeeded (Fingerprint + Face + PIN)",
    "5120": "1:N authentication failed",
    "5123": "1:N authentication failed (PIN)",
    "5124": "1:N authentication failed (Fingerprint)",
    "5125": "1:N authentication failed (Face)",
    "5126": "1:N authentication failed (Access-on-card + PIN)",
    "5127": "1:N authentication failed (Access-on-card + Fingerprint)",
    "5376": "1:N duress authentication succeeded",
    "5377": "1:N duress authentication succeeded (Fingerprint)",
    "5378": "1:N duress authentication succeeded (Fingerprint + PIN)",
    "5379": "1:N duress authentication succeeded (Face)",
    "5380": "1:N duress authentication succeeded (Face + PIN)",
    "5381": "1:N duress authentication succeeded (Face + Fingerprint)",
    "5382": "1:N duress authentication succeeded (Face + Fingerprint + PIN)",
    "5383": "1:N duress authentication succeeded (Fingerprint + Face)",
    "5384": "1:N duress authentication succeeded (Fingerprint + Face + PIN)",
    "5632": "Dual authentication succeeded",
    "5888": "Dual authentication failed",
    "5889": "Dual authentication failed (Timeout)",
    "5890": "Dual authentication failed (Invalid access group)",
    "6144": "Authentication failed",
    "6145": "Authentication failed (Invalid authentication mode)",
    "6146": "Authentication failed (Invalid credential)",
    "6147": "Authentication failed (Timeout)",
    "6148": "Authentication failed. (Server matching is off)",
    "6149": "3 consecutive authentication failed",
    # 6400–6423: access-denied reasons.
    "6400": "Access denied",
    "6401": "Access denied (Invalid access group)",
    "6402": "Access denied (Disabled user)",
    "6403": "Access denied(Invalid period)",
    "6404": "Access denied (Blacklist)",
    "6405": "Access denied (Hard Anti-passback)",
    "6406": "Access denied (Timed anti-passback)",
    "6407": "Access denied (Forced lock schedule)",
    "6408": "Access denied (Soft anti-passback)",
    "6409": "Access denied (Soft timed anti-passback)",
    "6410": "Access denied (Face detection failure)",
    "6411": "Access denied (Capture failure)",
    "6412": "Fake Fingerprint Detected",
    "6414": "Intrusion alarm access denied",
    "6415": "Access denied (Interlock)",
    "6418": "Access Denied (Anti-tailgating)",
    "6419": "Access denied (Exceeded threshold temp.)",
    "6420": "Access denied (Temp. not measured correctly)",
    "6421": "Access denied (Mask not detected)",
    "6422": "Access Denied (Occupancy Limit Violation)",
    "6423": "Access denied (Locked)",
    "6656": "Authentication failed (Bad fingerprint placement)",
    "6912": "Access granted (Check only)",
    "6913": "Access granted (Soft temp. violation on check only)",
    "6914": "Access granted (Soft mask violation on check only)",
    "6915": "Access granted (Soft temp. and mask violation on check only)",
    "7168": "Access denied (Exceeded threshold temp. on check only)",
    "7169": "Access denied (Temp. not measured correctly on check only)",
    "7170": "Access denied (Mask not detected on check only)",
    "7188": "Access denied (Temperature not measured properly)",
    "7424": "Abnormal temp. detected (Exceeded Threshold temp.)",
    "7425": "Abnormal temp. detected (Temp. not measured correctly)",
    "7680": "Mask not detected",
    # 8192–10761: user enrollment / update / deletion lifecycle events.
    "8192": "User enrollment succeeded",
    "8448": "User enrollment failed",
    "8449": "User enrollment failed (Face extraction failed)",
    "8450": "User enrollment failed (Different fingerprint template format)",
    "8451": "User enrollment failed (Exceeded max credential count)",
    "8457": "User enrollment failed (User data error)",
    "8704": "User update succeeded",
    "8960": "User update failed",
    "8961": "User update failed (Face extraction failed)",
    "8962": "User update failed (Different fingerprint template format)",
    "8969": "User update failed (User data error)",
    "9216": "User deletion succeeded",
    "9472": "User deletion failed",
    "9728": "All user deletion succeeded",
    "9984": "Access-on-card issue succeeded",
    "10240": "Duplicate Credential",
    "10242": "Duplicate Card",
    "10244": "Duplicate Fingerprint",
    "10245": "Duplicate Face",
    "10496": "User partial update succeeded",
    "10752": "User partial update failed",
    "10753": "User partial update failed (Face extraction failed)",
    "10754": "User partial update failed (Different fingerprint template format)",
    "10755": "User partial update failed (Exceeded max credential count)",
    "10756": "User partial update failed (User does not exist)",
    "10761": "User partial update failed (User data error)",
    # 12288–19971: device / system / connectivity events.
    "12288": "Device restarted",
    "12368": "Device restarted (System Reboot)",
    "12544": "Device started",
    "12800": "Device time changed",
    "12801": "Time-zone changed",
    "12802": "DST applied",
    "13056": "Network connected",
    "13312": "Network disconnected",
    "13568": "DHCP connected",
    "13824": "Administrator menu entered",
    "13825": "Administrator authentication failed",
    "13826": "Administrator authentication failed (Invalid credential)",
    "14080": "Device locked",
    "14336": "Device unlocked",
    "14592": "BioStar X communication locked",
    "14848": "BioStar X communication unlocked",
    "15104": "BioStar X connected",
    "15120": "RTSP connected",
    "15360": "BioStar X disconnected",
    "15376": "RTSP disconnected",
    "15616": "RS-485 connected",
    "15872": "RS-485 disconnected",
    "16128": "Input detected on",
    "16129": "Input detected off",
    "16384": "Tamper on",
    "16640": "Tamper off",
    "16896": "Event log cleared",
    "17152": "Firmware upgrade succeeded",
    "17408": "Resource upgrade succeeded",
    "17664": "Device reset",
    "17665": "Database Reset",
    "17666": "Factory Reset",
    "17667": "Restored to default without network settings",
    # NOTE(review): license events appear twice (17680–17684 and
    # 19712–19716) — presumably distinct device vs. server codes; confirm
    # against the BioStar event reference.
    "17680": "License Activation Succeeded",
    "17681": "License Activation Failed",
    "17682": "License Deactivation Succeeded",
    "17683": "License Deactivation Failed",
    "17684": "License Expired",
    "17920": "Supervised Input (Short)",
    "18176": "Supervised Input (Open)",
    "18432": "AC Power Failure",
    "18688": "AC Power Success",
    "18944": "Door open request by exit button",
    "18945": "Door open request by exit button (Relay does not activate)",
    "19200": "Door open request by operator",
    "19456": "Door open request by Intercom door open button",
    "19712": "License Activation Succeeded",
    "19713": "License Activation Failed",
    "19714": "License Deactivation Succeeded",
    "19715": "License Deactivation Failed",
    "19716": "License Expired",
    "19969": "Low battery level",
    "19970": "Critical battery level",
    "19971": "Empty battery",
    # 20480–28423: door and zone events.
    "20480": "Door unlocked",
    "20736": "Door locked",
    "20992": "Door opened",
    "21248": "Door closed",
    "21504": "Forced door opened",
    "21760": "Held door opened",
    "22016": "Forced door open alarmed",
    "22017": "Lock Override Alarm Activated",
    "22272": "Forced door open cleared",
    "22274": "Lock Override Alarm Cleared",
    "22528": "Held door open alarmed",
    "22784": "Held door open cleared",
    "23040": "Anti-passback alarmed",
    "23296": "Anti-passback cleared",
    "23553": "Door release request by schedule",
    "23554": "Door release request by emergency",
    "23556": "Door release request by operator",
    "23809": "Door lock request by schedule",
    "23810": "Door lock request by emergency",
    "23812": "Door lock request by operator",
    "24065": "Door unlock request by schedule",
    "24066": "Door unlock request by emergency",
    "24068": "Door unlock request by operator",
    "24320": "Door open request sent",
    "24321": "Door unlock request sent",
    "24322": "Door lock request sent",
    "24323": "Door release request sent",
    "24324": "Aux input detected (Fire)",
    "24325": "Fire alarm detected",
    "24326": "Fire alarm cleared",
    "24327": "Door normalized",
    "24576": "Anti-passback violation detected",
    "24577": "Hard anti-passback violation detected",
    "24578": "Soft anti-passback violation detected",
    "24832": "Anti-passback zone alarm detected",
    "24833": "Occupancy Limit Zone Availability Recovered",
    "24834": "Access Denied (Occupancy Count Full)",
    "24835": "Exit Occurred While Count Zero",
    "24836": "Occupancy Limit zone Almost Full 1st Level Detected",
    "24837": "Occupancy Limit zone Almost Full 2nd Level Detected",
    "24838": "Occupancy Limit Zone Full Detected",
    "25088": "Anti-passback zone alarm cleared",
    "25344": "Timed anti-passback violation detected",
    "25345": "Timed anti-passback violation detected",
    "25346": "Soft Timed anti-passback violation detected",
    "25600": "Timed anti-passback alarm detected",
    "25856": "Timed anti-passback alarm cleared",
    "26112": "Fire alarm input detected.",
    "26368": "Fire alarm zone alarm detected",
    "26624": "Fire alarm zone alarm cleared",
    "26880": "Scheduled lock violation detected",
    "27094": "Scheduled unlock zone ended (Door)",
    "27136": "Scheduled lock zone started",
    "27392": "Scheduled lock zone ended",
    "27648": "Scheduled unlock zone started (Door)",
    "27904": "Scheduled unlock zone ended (Door)",
    "28160": "Scheduled lock zone alarm detected",
    "28416": "Scheduled lock zone alarm cleared",
    "28417": "Occupancy Full Detected",
    "28418": "Occupancy Availability Recovered",
    "28419": "Exit Occurred While Occupancy Count Zero",
    "28420": "Occupancy Count Alert 1 Detected",
    "28421": "Occupancy Count Alert 2 Detected",
    "28422": "Occupancy Limit Violation",
    "28423": "Occupancy Limit Violation (Network Failure)",
    # 28672–30976: elevator / floor events.
    "28672": "Floor Activated",
    "28928": "Floor Released",
    "29185": "Elevator release request by schedule",
    "29186": "Release by emergency",
    "29188": "Release by operator",
    "29192": "Release floor by alarm",
    "29441": "Elevator unlock request by schedule",
    "29442": "Unlock by emergency",
    "29444": "Unlock by operator",
    "29448": "Activate floor by alarm",
    "29697": "Lock by schedule",
    "29698": "Lock by emergency",
    "29700": "Lock by operator",
    "29952": "Elevator alarm input detected",
    "30208": "Elevator alarm",
    "30464": "Elevator alarm cleared",
    "30720": "Enable all floor relays",
    "30976": "Disable all floor relays",
    "32768": "BioStar X Event",
    # 36864–39680: arming / intrusion events.
    "36864": "Access denied (Armed status)",
    "37120": "Arming auth success",
    "37376": "Armed",
    "37632": "Arming failed",
    "37888": "Disarming auth success",
    "38144": "Disarmed",
    "38656": "Intrusion alarm input",
    "38912": "Intrusion alarm detected",
    "39168": "Intrusion alarm cleared",
    "39424": "Arming auth failed",
    "39680": "Disarming auth failed",
    "40960": "Interlock fail",
    "40961": "Interlock door open denied",
    "40962": "Interlock door open denied (Occupied)",
    "41216": "Interlock zone alarm",
    "41472": "Interlock door open denied alarm",
    "41728": "Interlock door open denied alarm (Occupied)",
    "41984": "Interlock zone Alarm Clear",
    "42753": "Occupancy Limit Violation (Count Full)",
    "42754": "Occupancy Limit Violation (Network Failure)",
    "45056": "Muster zone time limit violation",
    "45312": "Muster zone alarm detected",
    "45568": "Muster zone alarm cleared",
    "47104": "Scheduled unlock zone started (Elevator)",
    "47360": "Scheduled unlock zone ended (Elevator)",
    "49152": "Fail to save to the server DB",
    # 49920–50440: relay state events (system backup codes interleaved).
    "49920": "Relay Activated",
    "49921": "Relay Activated (Supervised Input Fault Short)",
    "49922": "Relay Activated (Supervised Input Fault Open)",
    "49923": "Relay Activated (Input Activated)",
    "49925": "Relay Activated (Tamper Activated)",
    "49928": "Relay Activated (RS-485 Disconnected)",
    "50176": "Relay Deactivated",
    "50177": "System Backup Started",
    "50178": "Relay Deactivated (Supervised Input Fault Open)",
    "50179": "System Backup Complete",
    "50180": "Relay Deactivated (Input Deactivated)",
    "50181": "System Backup Failed",
    "50182": "Relay Deactivated (Tamper Deactivated)",
    "50183": "Relay Deactivated (RS-485 Connected)",
    "50433": "Relay Retained (Supervised Input Short)",
    "50434": "Relay Retained (Supervised Input Fault Open)",
    "50435": "Relay Retained (Input Activated)",
    "50436": "Relay Retained (Input Deactivated)",
    "50437": "Relay Retained (Tamper Activated)",
    "50438": "Relay Retained (Tamper Deactivated)",
    "50439": "Relay Retained (RS-485 Connected)",
    "50440": "Relay Retained (RS-485 Disconnected)",
    # 51001–51007: directory synchronization events.
    "51001": "Active Directory User Sync Started",
    "51002": "Active Directory User Sync Complete",
    "51003": "Active Directory User Sync Failed",
    "51004": "Entra ID User Sync Started",
    "51005": "Entra ID User Sync Complete",
    "51006": "Entra ID User Sync Failed",
    "51007": "User Sync Failed (User Group Sync Error)",
    # 53248–54025: Template-on-Mobile (ToM) enrollment events.
    "53248": "ToM Face Enrollment Succeeded",
    "53504": "ToM Face Enrollment Failed",
    "53505": "ToM Face Enrollment Failed (Scan Canceled)",
    "53506": "ToM Face Enrollment Failed (Scan Timeout)",
    "53507": "ToM Face Enrollment Failed (Template Extraction Failed)",
    "53508": "ToM Face Enrollment Failed (Communication Failed)",
    "53513": "ToM Face Enrollment Failed (Unspecified Error)",
    "53760": "ToM Fingerprint Enrollment Succeeded",
    "54016": "ToM Fingerprint Enrollment Failed",
    "54017": "ToM Fingerprint Enrollment Failed (Scan Canceled)",
    "54018": "ToM Fingerprint Enrollment Failed (Scan Timeout)",
    "54019": "ToM Fingerprint Enrollment Failed (Template Extraction Failed)",
    "54020": "ToM Fingerprint Enrollment Failed (Communication Failed)",
    "54025": "ToM Fingerprint Enrollment Failed (Unspecified Error)",
    "54500": "Quick Action Activated",
    "54501": "Quick action",
    "etc": "etc",
}
|
|
464
|
+
|
|
465
|
+
|
|
466
|
+
def validate_datetime(date_string: str) -> bool:
    """Return True if *date_string* is a timezone-aware ISO 8601 datetime.

    Accepts either an explicit UTC offset (e.g. ``+09:00`` / ``-05:00``) or
    a trailing ``Z`` suffix. Naive datetimes (no timezone information) and
    strings that do not parse as ISO 8601 are rejected.
    """
    try:
        # 'Z' is not understood by datetime.fromisoformat() before
        # Python 3.11, so normalize it to an explicit UTC offset first.
        parsed = datetime.fromisoformat(date_string.replace('Z', '+00:00'))
    except (ValueError, TypeError, AttributeError):
        # ValueError: not a valid ISO 8601 string.
        # TypeError/AttributeError: caller passed a non-string value.
        return False
    # Let the parser decide whether a timezone was present, instead of the
    # old fragile heuristic that counted '-' characters and looked for '+'.
    return parsed.tzinfo is not None
|
|
480
|
+
|
|
481
|
+
|
|
482
|
+
class EventHandler(BaseHandler):
|
|
483
|
+
"""Handle event and log related operations."""
|
|
484
|
+
|
|
485
|
+
async def _get_server_timezone_offset(self) -> int:
|
|
486
|
+
"""
|
|
487
|
+
Get server timezone offset in minutes from preferences.
|
|
488
|
+
Uses cached value if available, only fetches once per session.
|
|
489
|
+
Returns 540 (UTC+9) as default if API call fails.
|
|
490
|
+
"""
|
|
491
|
+
# 캐시된 값이 있으면 바로 반환
|
|
492
|
+
if self.session.timezone_offset_minutes is not None:
|
|
493
|
+
return self.session.timezone_offset_minutes
|
|
494
|
+
|
|
495
|
+
try:
|
|
496
|
+
headers = {
|
|
497
|
+
"bs-session-id": self.get_session_id(),
|
|
498
|
+
"Content-Type": "application/json"
|
|
499
|
+
}
|
|
500
|
+
|
|
501
|
+
async with httpx.AsyncClient(verify=False) as client:
|
|
502
|
+
response = await client.get(
|
|
503
|
+
f"{self.session.config.biostar_url}/api/preferences/1",
|
|
504
|
+
headers=headers,
|
|
505
|
+
timeout=10
|
|
506
|
+
)
|
|
507
|
+
|
|
508
|
+
if response.status_code == 200:
|
|
509
|
+
data = response.json()
|
|
510
|
+
preference = data.get("Preference", {})
|
|
511
|
+
timezone_id = preference.get("time_zone", "26")
|
|
512
|
+
offset = get_timezone_offset(timezone_id, default=540)
|
|
513
|
+
|
|
514
|
+
# Cache in session
|
|
515
|
+
self.session.timezone_offset_minutes = offset
|
|
516
|
+
logger.info(f"Server timezone cached: UTC{'+' if offset >= 0 else ''}{offset/60:.1f} (offset: {offset} minutes)")
|
|
517
|
+
return offset
|
|
518
|
+
except Exception as e:
|
|
519
|
+
logger.warning(f"Failed to get server timezone, using default UTC+9: {e}")
|
|
520
|
+
|
|
521
|
+
# Cache default value
|
|
522
|
+
self.session.timezone_offset_minutes = 540
|
|
523
|
+
return 540 # Default to UTC+9 (Seoul/Tokyo)
|
|
524
|
+
|
|
525
|
+
def _convert_event_times(self, events: List[Dict[str, Any]], timezone_offset_minutes: int) -> List[Dict[str, Any]]:
|
|
526
|
+
"""
|
|
527
|
+
Convert event times from UTC to server local time.
|
|
528
|
+
Adds both original UTC time and converted local time.
|
|
529
|
+
"""
|
|
530
|
+
converted_events = []
|
|
531
|
+
for event in events:
|
|
532
|
+
event_copy = event.copy()
|
|
533
|
+
|
|
534
|
+
# Convert datetime field
|
|
535
|
+
if "datetime" in event_copy and event_copy["datetime"]:
|
|
536
|
+
utc_time = event_copy["datetime"]
|
|
537
|
+
local_time = convert_utc_to_server_time(utc_time, timezone_offset_minutes)
|
|
538
|
+
event_copy["datetime_utc"] = utc_time
|
|
539
|
+
event_copy["datetime"] = local_time
|
|
540
|
+
|
|
541
|
+
converted_events.append(event_copy)
|
|
542
|
+
|
|
543
|
+
return converted_events
|
|
544
|
+
|
|
545
|
+
def _map_event_type_id_to_name(self, event_type_id: Any) -> str:
|
|
546
|
+
"""
|
|
547
|
+
이벤트 ID를 이벤트 이름으로 변환하는 내부 함수.
|
|
548
|
+
사용자에게는 항상 이벤트 이름만 표시됩니다.
|
|
549
|
+
"""
|
|
550
|
+
if event_type_id is None:
|
|
551
|
+
return "Unknown Event"
|
|
552
|
+
|
|
553
|
+
event_id_str = str(event_type_id)
|
|
554
|
+
return EVENT_TYPE_NAMES.get(event_id_str, f"Unknown Event (ID: {event_id_str})")
|
|
555
|
+
|
|
556
|
+
def _enrich_events_with_names(self, events: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
|
|
557
|
+
"""
|
|
558
|
+
이벤트 리스트의 각 이벤트에 event_type_name을 추가하고,
|
|
559
|
+
사용자에게 보여줄 때는 ID 대신 이름만 표시되도록 합니다.
|
|
560
|
+
event_type_id 필드는 완전히 이벤트 이름으로 교체합니다.
|
|
561
|
+
"""
|
|
562
|
+
enriched_events = []
|
|
563
|
+
for event in events:
|
|
564
|
+
event_copy = event.copy()
|
|
565
|
+
|
|
566
|
+
# event_type_id 필드 처리
|
|
567
|
+
if "event_type_id" in event_copy:
|
|
568
|
+
event_type_id_original = event_copy["event_type_id"]
|
|
569
|
+
|
|
570
|
+
# 이미 딕셔너리 형태로 되어있는 경우 (예: {"code": "4096", "name": "..."})
|
|
571
|
+
if isinstance(event_type_id_original, dict):
|
|
572
|
+
code = event_type_id_original.get("code")
|
|
573
|
+
event_type_name = self._map_event_type_id_to_name(code)
|
|
574
|
+
else:
|
|
575
|
+
# 숫자나 문자열로 되어있는 경우
|
|
576
|
+
event_type_name = self._map_event_type_id_to_name(event_type_id_original)
|
|
577
|
+
|
|
578
|
+
# event_type_id를 이벤트 이름으로 완전히 교체 (ID는 완전히 숨김)
|
|
579
|
+
event_copy["event_type_id"] = event_type_name
|
|
580
|
+
event_copy["event_type"] = event_type_name
|
|
581
|
+
event_copy["event_type_name"] = event_type_name
|
|
582
|
+
|
|
583
|
+
enriched_events.append(event_copy)
|
|
584
|
+
|
|
585
|
+
return enriched_events
|
|
586
|
+
|
|
587
|
+
async def get_event_types(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """
    GET /api/event_types?is_break_glass=<bool>&setting_alert=<bool>&setting_all=<bool>

    Server flags:
    - is_break_glass: if true, lists user-renamed (break-glass) event names. When true and the others are not given, setting_alert/setting_all default to false.
    - setting_alert: if true, lists alertable event types.
    - setting_all: if true, lists all event types.

    Client-side enhancements:
    - search: case-insensitive contains across code/name/description
    - codes: restrict to given event code(s)
    - only_alertable: keep only items with alertable == true
    - only_enable_alert: keep only items with enable_alert == true
    - sort_by: "code" | "name" (default "code")
    - sort_desc: boolean
    - limit, offset: slicing
    - group_by: "none" | "alertable" | "enable_alert" (default "none")
    """
    try:
        self.check_auth()

        # ---- Server query parameters ----
        is_break_glass = bool(args.get("is_break_glass", False))

        # setting_alert / setting_all default to True, unless only the
        # break-glass listing was requested (then they default to False).
        if "setting_alert" in args:
            setting_alert = bool(args.get("setting_alert"))
        else:
            setting_alert = False if is_break_glass else True

        if "setting_all" in args:
            setting_all = bool(args.get("setting_all"))
        else:
            setting_all = False if is_break_glass else True

        # BioStar expects lowercase string booleans in the query string.
        params = {
            "is_break_glass": str(is_break_glass).lower(),
            "setting_alert": str(setting_alert).lower(),
            "setting_all": str(setting_all).lower(),
        }

        headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json",
        }

        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.get(
                f"{self.session.config.biostar_url}/api/event_types",
                headers=headers,
                params=params,
            )

            if resp.status_code != 200:
                return self.error_response(f"API call failed: {resp.status_code} - {resp.text}")

        payload = resp.json() or {}
        rows = (payload.get("EventTypeCollection") or {}).get("rows", []) or []

        # ---- Normalize booleans ----
        # The API may return real booleans or the strings "true"/"false".
        def _to_bool(v: Any) -> bool:
            if isinstance(v, bool):
                return v
            if isinstance(v, str):
                return v.strip().lower() == "true"
            return bool(v)

        normalized: List[Dict[str, Any]] = [
            {
                "code": str(it.get("code") or ""),
                "name": it.get("name") or "",
                "description": it.get("description") or "",
                "alertable": _to_bool(it.get("alertable", False)),
                "enable_alert": _to_bool(it.get("enable_alert", False)),
            }
            for it in rows
        ]

        # ---- Client-side filters ----
        search = (args.get("search") or "").strip().lower()
        if search:
            normalized = [
                x for x in normalized
                if (search in x["code"].lower()
                    or search in x["name"].lower()
                    or search in (x["description"] or "").lower())
            ]

        codes_arg = args.get("codes") or []
        if codes_arg:
            code_set = {str(c) for c in codes_arg}
            normalized = [x for x in normalized if x["code"] in code_set]

        if bool(args.get("only_alertable", False)):
            normalized = [x for x in normalized if x["alertable"]]

        if bool(args.get("only_enable_alert", False)):
            normalized = [x for x in normalized if x["enable_alert"]]

        # ---- Sorting ----
        sort_by = (args.get("sort_by") or "code").lower()
        sort_desc = bool(args.get("sort_desc", False))

        def _code_key(v: Dict[str, Any]):
            # Numeric codes sort numerically; non-numeric fall back to text.
            c = v.get("code", "")
            try:
                return int(c)
            except Exception:
                return c

        if sort_by == "name":
            normalized.sort(key=lambda v: (v.get("name") or "").lower(), reverse=sort_desc)
        else:
            normalized.sort(key=_code_key, reverse=sort_desc)

        # ---- Slicing ----
        offset = int(args.get("offset", 0))
        limit = args.get("limit")
        if limit is not None:
            limit = int(limit)
            sliced = normalized[offset: offset + limit]
        else:
            sliced = normalized[offset:]

        # ---- Grouping (applies to the current page only) ----
        group_by = (args.get("group_by") or "none").lower()
        if group_by in ("alertable", "enable_alert"):
            grouped: Dict[str, List[Dict[str, Any]]] = {
                "true": [x for x in sliced if x[group_by]],
                "false": [x for x in sliced if not x[group_by]],
            }
            data_out: Dict[str, Any] = grouped
            returned_count = len(grouped["true"]) + len(grouped["false"])
        else:
            data_out = {"items": sliced}
            returned_count = len(sliced)

        # ---- Summary ----
        # BUGFIX: the original "total" expression was a ternary whose two
        # branches were both len(normalized); "total" is simply the
        # post-filter item count.
        summary = {
            "total": len(normalized),
            "returned": returned_count,
            "alertable_true_count": sum(1 for x in normalized if x["alertable"]),
            "enable_alert_true_count": sum(1 for x in normalized if x["enable_alert"]),
        }

        return self.success_response({
            "message": f"Found {summary['total']} event types",
            "summary": summary,
            "api_params": params,
            "filters": {
                "search": search or None,
                "codes": codes_arg or None,
                "only_alertable": bool(args.get("only_alertable", False)),
                "only_enable_alert": bool(args.get("only_enable_alert", False)),
                "sort_by": sort_by,
                "sort_desc": sort_desc,
                "offset": offset,
                "limit": limit,
                "group_by": group_by,
            },
            "event_types": data_out,
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
757
|
+
|
|
758
|
+
|
|
759
|
+
def _convert_to_utc(self, datetime_str: str) -> str:
|
|
760
|
+
"""
|
|
761
|
+
Convert local datetime string to UTC ISO format (BioStar API format).
|
|
762
|
+
|
|
763
|
+
Args:
|
|
764
|
+
datetime_str: Local datetime string (ISO 8601 format)
|
|
765
|
+
e.g., "2025-11-01T00:00:00", "2025-11-01T23:59:59+09:00"
|
|
766
|
+
|
|
767
|
+
Returns:
|
|
768
|
+
UTC datetime string in BioStar format
|
|
769
|
+
e.g., "2025-10-31T15:00:00.000Z"
|
|
770
|
+
"""
|
|
771
|
+
try:
|
|
772
|
+
from datetime import timezone as tz
|
|
773
|
+
|
|
774
|
+
# Parse the input datetime
|
|
775
|
+
dt_str = datetime_str.strip()
|
|
776
|
+
|
|
777
|
+
# Parse datetime with or without timezone
|
|
778
|
+
if dt_str.endswith('Z'):
|
|
779
|
+
# Already UTC
|
|
780
|
+
dt_str = dt_str.replace('Z', '+00:00')
|
|
781
|
+
dt = datetime.fromisoformat(dt_str)
|
|
782
|
+
dt_utc = dt.replace(tzinfo=None)
|
|
783
|
+
elif '+' in dt_str or (dt_str.count('-') > 2 and 'T' in dt_str):
|
|
784
|
+
# Has timezone offset (e.g., "+09:00" or "-05:00")
|
|
785
|
+
dt = datetime.fromisoformat(dt_str)
|
|
786
|
+
# Convert to UTC
|
|
787
|
+
if dt.tzinfo:
|
|
788
|
+
dt_utc = dt.astimezone(tz.utc).replace(tzinfo=None)
|
|
789
|
+
else:
|
|
790
|
+
dt_utc = dt
|
|
791
|
+
else:
|
|
792
|
+
# No timezone info - assume server local time (KST = UTC+9)
|
|
793
|
+
dt = datetime.fromisoformat(dt_str)
|
|
794
|
+
# Treat as KST and convert to UTC
|
|
795
|
+
dt_with_tz = dt.replace(tzinfo=tz(timedelta(hours=9)))
|
|
796
|
+
dt_utc = dt_with_tz.astimezone(tz.utc).replace(tzinfo=None)
|
|
797
|
+
|
|
798
|
+
# Format as BioStar expects: "2025-10-31T15:00:00.000Z"
|
|
799
|
+
return dt_utc.strftime("%Y-%m-%dT%H:%M:%S.000Z")
|
|
800
|
+
|
|
801
|
+
except Exception as e:
|
|
802
|
+
logger.error(f"Failed to convert datetime '{datetime_str}' to UTC: {e}")
|
|
803
|
+
# Return original string as fallback
|
|
804
|
+
return datetime_str
|
|
805
|
+
|
|
806
|
+
async def search_events(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Advanced search for events (accepts both 'value' and 'values' in conditions).

    Builds a normalized POST /api/events/search query from *args*:
    convenience keys (start_datetime/end_datetime, user_id/user_name) are
    folded into 'conditions', datetime values are converted to UTC, and
    the results are enriched with event-type names and server-local times.
    """
    try:
        self.check_auth()

        headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json"
        }

        # ---- base pagination/sort ----
        limit = int(args.get("limit", 100))
        offset = int(args.get("offset", 0))
        order_by = args.get("order_by", "datetime")
        order_type = args.get("order_type", "desc")
        descending = (order_type == "desc")

        conds_in = list(args.get("conditions") or [])
        start_dt = args.get("start_datetime")
        end_dt = args.get("end_datetime")

        # If no explicit datetime condition is present, derive one from
        # start_dt/end_dt (operator 3 = BETWEEN, 5 = GREATER, 6 = LESS).
        has_datetime = any(c.get("column") == "datetime" for c in conds_in)
        if not has_datetime and (start_dt or end_dt):
            # Convert to UTC for BioStar API
            if start_dt and end_dt:
                start_dt_utc = self._convert_to_utc(start_dt)
                end_dt_utc = self._convert_to_utc(end_dt)
                logger.info(f" Date range: {start_dt} ~ {end_dt} → UTC: {start_dt_utc} ~ {end_dt_utc}")
                conds_in.append({"column": "datetime", "operator": 3, "values": [start_dt_utc, end_dt_utc]})
            elif start_dt:
                start_dt_utc = self._convert_to_utc(start_dt)
                logger.info(f" Start date: {start_dt} → UTC: {start_dt_utc}")
                conds_in.append({"column": "datetime", "operator": 5, "values": [start_dt_utc]})
            else:
                end_dt_utc = self._convert_to_utc(end_dt)
                logger.info(f" End date: {end_dt} → UTC: {end_dt_utc}")
                conds_in.append({"column": "datetime", "operator": 6, "values": [end_dt_utc]})

        # Resolve user_name to a user_id when only the name was given.
        user_id = args.get("user_id")
        user_name = args.get("user_name")
        if user_name and not user_id:
            resolved_id = await self._resolve_user_id_by_name(user_name, headers)
            if not resolved_id:
                return self.error_response(f"No user matched name '{user_name}'.")
            user_id = resolved_id
        if user_id:
            has_user = any(c.get("column") in ("user_id", "user_id.user_id") for c in conds_in)
            if not has_user:
                # Use operator 2 (CONTAINS), same as the BioStar web UI.
                conds_in.append({"column": "user_id.user_id", "operator": 2, "values": [str(user_id)]})
                logger.info(f" User filter: user_id={user_id}")

        # Normalize every condition: validate/convert datetimes, map
        # 'user_id' to 'user_id.user_id', and stringify all values.
        norm_conds: List[Dict[str, Any]] = []
        for c in conds_in:
            column = c.get("column")
            operator = int(c.get("operator"))
            values = c.get("values", c.get("value"))

            if values is None:
                return self.error_response("Each condition must include 'values' (or 'value').")

            if column == "datetime":
                # Convert datetime values to UTC
                utc_values = []
                for v in values:
                    if isinstance(v, str):
                        if not validate_datetime(v):
                            return self.error_response(
                                f"Invalid datetime format: {v}. Use ISO 8601 with timezone, e.g. 2025-08-01T00:00:00.000Z"
                            )
                        # Convert to UTC for BioStar API
                        utc_v = self._convert_to_utc(v)
                        utc_values.append(utc_v)
                    else:
                        utc_values.append(v)
                values = utc_values
                if len(values) == 2:
                    logger.info(f" Datetime condition converted to UTC: {utc_values[0]} ~ {utc_values[1]}")

            if column == "user_id":
                column = "user_id.user_id"

            # Only EQUAL/NOT/CONTAINS make sense for user ids; force EQUAL
            # for anything else.
            if column == "user_id.user_id" and operator not in (0, 1, 2):
                operator = 0

            if not isinstance(values, list):
                values = [values]
            values = [str(v) for v in values]

            norm_conds.append({"column": column, "operator": operator, "values": values})

        query = {
            "Query": {
                "limit": limit,
                "conditions": norm_conds,
                "orders": [{"column": order_by, "descending": bool(descending)}]
            }
        }
        if offset:
            query["Query"]["offset"] = offset

        # Log the actual query being sent (for debugging).
        import json as json_module
        logger.info(f" BioStar API Query: {json_module.dumps(query, ensure_ascii=False)}")

        async with httpx.AsyncClient(verify=False) as client:
            response = await client.post(
                f"{self.session.config.biostar_url}/api/events/search",
                headers=headers,
                json=query
            )

            if response.status_code != 200:
                return self.error_response(f"API call failed: {response.status_code} - {response.text}")

            data = response.json() or {}
            events = (data.get("EventCollection") or {}).get("rows", []) or []
            total = (data.get("EventCollection") or {}).get("total", 0)

            # Fetch the server timezone (cached on the session).
            timezone_offset = await self._get_server_timezone_offset()

            # Automatically convert event-type ids to display names.
            enriched_events = self._enrich_events_with_names(events)

            # Convert event times to server local time.
            enriched_events = self._convert_event_times(enriched_events, timezone_offset)

            return self.success_response({
                "message": f"Found {len(events)} events matching search criteria",
                "total": total,
                "count": len(enriched_events),
                "events": enriched_events,
                "request_query": query
            })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
945
|
+
|
|
946
|
+
async def get_realtime_events(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Describe the WebSocket setup needed for real-time events (simulation).

    Real-time monitoring is not implemented here; this only echoes back the
    configuration a WebSocket client would need.
    """
    try:
        self.check_auth()

        monitor_seconds = args.get("duration", 60)
        wanted_types = args.get("event_types", [])

        # Strip the scheme so the host can be re-used in a wss:// URL.
        base_host = self.session.config.biostar_url.replace('https://', '').replace('http://', '')

        return self.success_response({
            "message": "Real-time event monitoring requires WebSocket connection",
            "note": "This feature requires additional WebSocket client setup",
            "configuration": {
                "duration": f"{monitor_seconds} seconds",
                "event_types": wanted_types if wanted_types else "All events",
                "websocket_url": f"wss://{base_host}/wsapi/v1/events"
            }
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
969
|
+
|
|
970
|
+
async def get_access_logs(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Fetch access-control logs by delegating to the generic event search."""
    try:
        self.check_auth()

        # Assemble the search conditions for access-related events.
        conditions: List[Dict[str, Any]] = []

        # Optional time-range bounds (operator 5 = GREATER, 6 = LESS).
        if args.get("start_datetime"):
            conditions.append({
                "column": "datetime",
                "operator": 5,
                "value": [args["start_datetime"]]
            })
        if args.get("end_datetime"):
            conditions.append({
                "column": "datetime",
                "operator": 6,
                "value": [args["end_datetime"]]
            })

        # Partial user-name match (operator 2 = CONTAINS).
        if args.get("user_name"):
            conditions.append({
                "column": "user_id.name",
                "operator": 2,
                "value": [args["user_name"]]
            })

        # success_only restricts to "Verify Success" events
        # (operator 0 = EQUAL, event type 4865).
        if args.get("success_only", False):
            conditions.append({
                "column": "event_type_id",
                "operator": 0,
                "value": ["4865"]
            })

        # Delegate the actual query to search_events.
        return await self.search_events({
            "conditions": conditions,
            "limit": args.get("limit", 100),
            "order_by": "datetime",
            "order_type": "desc"
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
1020
|
+
|
|
1021
|
+
async def export_events_csv(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    r"""
    Export event logs to CSV via POST /api/events/export and copy to Windows Downloads.

    - Server saves CSV under Nginx download dir (e.g., C:\Program Files\BioStar X\nginx\html\download)
    - Then we copy it to:
        • dest_dir (if provided), or
        • C:\Users\<target_username>\Downloads (if provided), or
        • %USERPROFILE%\Downloads (default)
    - Query params:
        • time_offset (minutes, e.g., 480 for UTC+8)
        • use_centigrade (bool; default True)

    Payload to BioStar:
    {
      "Query": {
        "conditions": [{"column":"datetime","operator":3,"values":[start,end]}, ...],
        "offset": 0,
        "columns": [...],
        "headers": [...]
      }
    }
    """
    try:
        self.check_auth()

        # ---------- file copy options ----------
        copy_to_downloads: bool = bool(args.get("copy_to_downloads", True))
        dest_dir_arg: str = (args.get("dest_dir") or "").strip()
        target_username: str = (args.get("target_username") or "").strip()

        headers = {
            "bs-session-id": self.get_session_id(),
            "Content-Type": "application/json",
        }

        # ---------- columns / headers ----------
        default_columns = [
            "datetime",
            "door_id.name",
            "device_id.id",
            "device_id.name",
            "user_group_id",
            "user_id",
            "temperature",
            "event_type_id",
            "tna_key",
        ]
        default_headers = [
            "Date",
            "Door",
            "Device ID",
            "Device",
            "User Group",
            "User",
            "Temperature",
            "Event",
            "TNA Key",
        ]
        columns = args.get("columns") or default_columns
        headers_csv = args.get("headers") or default_headers
        if len(columns) != len(headers_csv):
            return self.error_response("Length of 'headers' must match 'columns'.")

        # ---------- build/normalize conditions ----------
        # operator 3 = BETWEEN, 5 = GREATER, 6 = LESS
        conditions = list(args.get("conditions") or [])
        start_dt = args.get("start_datetime")
        end_dt = args.get("end_datetime")
        if not conditions:
            if start_dt and end_dt:
                conditions.append({"column": "datetime", "operator": 3, "values": [start_dt, end_dt]})
            elif start_dt:
                conditions.append({"column": "datetime", "operator": 5, "values": [start_dt]})
            elif end_dt:
                conditions.append({"column": "datetime", "operator": 6, "values": [end_dt]})

        # Convenience: user constraint (user_id or user_name)
        # - If user_name is provided, resolve to user_id via /api/v2/users/search
        user_id = args.get("user_id")
        user_name = args.get("user_name")
        if user_name and not user_id:
            resolved_id = await self._resolve_user_id_by_name(user_name, headers)
            if not resolved_id:
                return self.error_response(f"No user matched name '{user_name}'.")
            user_id = resolved_id

        if user_id:
            # Only add if not already constrained
            has_user_filter = any(
                (c.get("column") in ("user_id", "user_id.user_id")) for c in conditions
            )
            if not has_user_filter:
                conditions.append({
                    "column": "user_id.user_id",  # normalized field path
                    "operator": 0,
                    "values": [str(user_id)],
                })

        # Normalize any 'user_id' -> 'user_id.user_id'
        for c in conditions:
            col = c.get("column")
            if col == "user_id":
                c["column"] = "user_id.user_id"
            # Ensure 'values' is a list of strings
            vals = c.get("values")
            if isinstance(vals, list):
                c["values"] = [str(v) for v in vals]
            elif vals is not None:
                c["values"] = [str(vals)]

        # Optional: event_types convenience
        event_types = args.get("event_types") or []
        if event_types:
            conditions.append({
                "column": "event_type_id",
                "operator": 0,
                "values": [str(x) for x in event_types],
            })

        offset = int(args.get("offset", 0))
        time_offset = int(args.get("time_offset_minutes", args.get("time_offset", 0)))
        use_centigrade = bool(args.get("use_centigrade", True))

        payload = {
            "Query": {
                "conditions": conditions,
                "offset": offset,
                "columns": columns,
                "headers": headers_csv,
            }
        }

        # ---------- call /api/events/export ----------
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.post(
                f"{self.session.config.biostar_url}/api/events/export",
                headers=headers,
                params={"time_offset": time_offset, "use_centigrade": str(use_centigrade).lower()},
                json=payload,
            )

        if resp.status_code != 200:
            return self.error_response(f"Events export failed: {resp.status_code} - {resp.text}")

        body = resp.json() or {}
        filename = (body.get("File") or {}).get("uri") or body.get("filename")
        if not filename:
            return self.error_response("Export succeeded but no filename returned", {"response": body})

        # ---------- wait for file in Nginx download dir ----------
        download_root = getattr(
            self.session.config,
            "download_dir",
            r"C:\Program Files\BioStar X\nginx\html\download",
        )
        src_path = Path(download_root) / filename

        # Poll up to ~5 seconds for the server to finish writing the file.
        for _ in range(50):
            if src_path.exists():
                break
            await asyncio.sleep(0.1)

        # ---------- copy to Windows Downloads ----------
        copy_status = "skipped"
        copied_to = ""
        if copy_to_downloads:
            if platform.system().lower() == "windows":
                if dest_dir_arg:
                    dest_dir = Path(dest_dir_arg)
                elif target_username:
                    dest_dir = Path(f"C:\\Users\\{target_username}\\Downloads")
                else:
                    userprofile = os.environ.get("USERPROFILE") or str(Path.home())
                    dest_dir = Path(userprofile) / "Downloads"

                try:
                    dest_dir.mkdir(parents=True, exist_ok=True)
                    if not src_path.exists():
                        copy_status = "failed: source not found"
                    else:
                        dest_path = dest_dir / filename
                        shutil.copy2(src_path, dest_path)
                        copied_to = str(dest_path)
                        copy_status = "success"
                except Exception as ce:
                    copy_status = f"failed: {ce}"
            else:
                copy_status = "skipped: non-windows"

        return self.success_response({
            "message": "Events CSV exported successfully",
            "filename": filename,
            # BUGFIX: this was an f-string with no placeholder, so every
            # response carried the same bogus literal URL; interpolate the
            # exported filename instead.
            "download_url": f"/download/{filename}",
            "source_path": str(src_path),
            "copy": {"enabled": copy_to_downloads, "status": copy_status, "destination": copied_to},
            "export_params": {
                "time_offset": time_offset,
                "use_centigrade": use_centigrade,
                "offset": offset,
                "columns": columns,
                "headers": headers_csv,
                "conditions": conditions,
            },
        })

    except Exception as e:
        return await self.handle_api_error(e)
|
|
1228
|
+
|
|
1229
|
+
|
|
1230
|
+
async def get_temperature_logs(self, args: Dict[str, Any]) -> Sequence[TextContent]:
    """Query temperature-measurement logs (event type 7168) from thermal cameras."""
    try:
        self.check_auth()

        # Always pin the search to the temperature-measurement event type.
        conditions: List[Dict[str, Any]] = [
            {"column": "event_type_id", "operator": 0, "value": ["7168"]}
        ]

        # Optional filters mirrored from the caller's arguments
        # (operator 5 = GREATER, 6 = LESS, 0 = EQUAL).
        if args.get("start_datetime"):
            conditions.append({
                "column": "datetime",
                "operator": 5,
                "value": [args["start_datetime"]]
            })
        if args.get("end_datetime"):
            conditions.append({
                "column": "datetime",
                "operator": 6,
                "value": [args["end_datetime"]]
            })
        if args.get("user_id"):
            conditions.append({
                "column": "user_id",
                "operator": 0,
                "value": [args["user_id"]]
            })

        search_result = await self.search_events({
            "conditions": conditions,
            "limit": 100,
            "order_by": "datetime",
            "order_type": "desc"
        })

        # Threshold filtering is not implemented yet: the temperature value
        # lives in event metadata, so we only report which filters were
        # requested instead of the (unfiltered) search result.
        if args.get("temperature_threshold") or args.get("abnormal_only"):
            return self.success_response({
                "message": "Temperature log filtering requires parsing event metadata",
                "note": "Temperature data is stored in event metadata field",
                "search_performed": True,
                "filters_applied": {
                    "abnormal_only": args.get("abnormal_only", False),
                    "threshold": args.get("temperature_threshold")
                }
            })

        return search_result

    except Exception as e:
        return await self.handle_api_error(e)
|
|
1287
|
+
|
|
1288
|
+
def format_access_log_info(self, log: Dict[str, Any]) -> Dict[str, Any]:
|
|
1289
|
+
"""Format access log information for response."""
|
|
1290
|
+
# 이벤트 타입 ID를 이벤트 이름으로 변환
|
|
1291
|
+
event_type_id_obj = log.get("event_type_id")
|
|
1292
|
+
if isinstance(event_type_id_obj, dict):
|
|
1293
|
+
event_code = event_type_id_obj.get("code")
|
|
1294
|
+
event_type_name = self._map_event_type_id_to_name(event_code)
|
|
1295
|
+
else:
|
|
1296
|
+
event_type_name = self._map_event_type_id_to_name(event_type_id_obj)
|
|
1297
|
+
|
|
1298
|
+
return {
|
|
1299
|
+
"id": log.get("id"),
|
|
1300
|
+
"datetime": log.get("datetime"),
|
|
1301
|
+
"user_id": log.get("user_id", {}).get("user_id") if isinstance(log.get("user_id"), dict) else log.get("user_id"),
|
|
1302
|
+
"user_name": log.get("user_id", {}).get("name") if isinstance(log.get("user_id"), dict) else None,
|
|
1303
|
+
"door_id": log.get("door_id", {}).get("id") if isinstance(log.get("door_id"), dict) else log.get("door_id"),
|
|
1304
|
+
"door_name": log.get("door_id", {}).get("name") if isinstance(log.get("door_id"), dict) else None,
|
|
1305
|
+
"event_type": event_type_name, # 이벤트 이름으로 표시 (ID 아님)
|
|
1306
|
+
"result": log.get("result"),
|
|
1307
|
+
"message": log.get("message")
|
|
1308
|
+
}
|
|
1309
|
+
|
|
1310
|
+
async def _resolve_user_id_by_name(self, name: str, headers: Dict[str, str]) -> Optional[str]:
    """Look up a single user_id by (possibly partial) user name.

    Returns the user_id as a string, or None when the lookup fails, no
    user matches, or the match is ambiguous.
    """
    try:
        # Request two rows: exactly one row means a unique match, two
        # rows means the partial search was ambiguous.
        body = {"limit": 2, "search_text": name}
        async with httpx.AsyncClient(verify=False) as client:
            resp = await client.post(f"{self.session.config.biostar_url}/api/v2/users/search",
                                     headers=headers, json=body)
            if resp.status_code != 200:
                return None
            rows = (resp.json() or {}).get("UserCollection", {}).get("rows", []) or []
            if len(rows) == 1:
                return str(rows[0].get("user_id"))
            # Prefer an exact name match when the search was ambiguous.
            exact_matches = [u for u in rows if str(u.get("name") or "") == name]
            if len(exact_matches) == 1:
                return str(exact_matches[0].get("user_id"))
            return None
    except Exception:
        return None
|