suprema-biostar-mcp 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biostar_x_mcp_server/__init__.py +25 -0
- biostar_x_mcp_server/__main__.py +15 -0
- biostar_x_mcp_server/config.py +87 -0
- biostar_x_mcp_server/handlers/__init__.py +35 -0
- biostar_x_mcp_server/handlers/access_handler.py +2162 -0
- biostar_x_mcp_server/handlers/audit_handler.py +489 -0
- biostar_x_mcp_server/handlers/auth_handler.py +216 -0
- biostar_x_mcp_server/handlers/base_handler.py +228 -0
- biostar_x_mcp_server/handlers/card_handler.py +746 -0
- biostar_x_mcp_server/handlers/device_handler.py +4344 -0
- biostar_x_mcp_server/handlers/door_handler.py +3969 -0
- biostar_x_mcp_server/handlers/event_handler.py +1331 -0
- biostar_x_mcp_server/handlers/file_handler.py +212 -0
- biostar_x_mcp_server/handlers/help_web_handler.py +379 -0
- biostar_x_mcp_server/handlers/log_handler.py +1051 -0
- biostar_x_mcp_server/handlers/navigation_handler.py +109 -0
- biostar_x_mcp_server/handlers/occupancy_handler.py +541 -0
- biostar_x_mcp_server/handlers/user_handler.py +3568 -0
- biostar_x_mcp_server/schemas/__init__.py +21 -0
- biostar_x_mcp_server/schemas/access.py +158 -0
- biostar_x_mcp_server/schemas/audit.py +73 -0
- biostar_x_mcp_server/schemas/auth.py +24 -0
- biostar_x_mcp_server/schemas/cards.py +128 -0
- biostar_x_mcp_server/schemas/devices.py +496 -0
- biostar_x_mcp_server/schemas/doors.py +306 -0
- biostar_x_mcp_server/schemas/events.py +104 -0
- biostar_x_mcp_server/schemas/files.py +7 -0
- biostar_x_mcp_server/schemas/help.py +29 -0
- biostar_x_mcp_server/schemas/logs.py +33 -0
- biostar_x_mcp_server/schemas/occupancy.py +19 -0
- biostar_x_mcp_server/schemas/tool_response.py +29 -0
- biostar_x_mcp_server/schemas/users.py +166 -0
- biostar_x_mcp_server/server.py +335 -0
- biostar_x_mcp_server/session.py +221 -0
- biostar_x_mcp_server/tool_manager.py +172 -0
- biostar_x_mcp_server/tools/__init__.py +45 -0
- biostar_x_mcp_server/tools/access.py +510 -0
- biostar_x_mcp_server/tools/audit.py +227 -0
- biostar_x_mcp_server/tools/auth.py +59 -0
- biostar_x_mcp_server/tools/cards.py +269 -0
- biostar_x_mcp_server/tools/categories.py +197 -0
- biostar_x_mcp_server/tools/devices.py +1552 -0
- biostar_x_mcp_server/tools/doors.py +865 -0
- biostar_x_mcp_server/tools/events.py +305 -0
- biostar_x_mcp_server/tools/files.py +28 -0
- biostar_x_mcp_server/tools/help.py +80 -0
- biostar_x_mcp_server/tools/logs.py +123 -0
- biostar_x_mcp_server/tools/navigation.py +89 -0
- biostar_x_mcp_server/tools/occupancy.py +91 -0
- biostar_x_mcp_server/tools/users.py +1113 -0
- biostar_x_mcp_server/utils/__init__.py +31 -0
- biostar_x_mcp_server/utils/category_mapper.py +206 -0
- biostar_x_mcp_server/utils/decorators.py +101 -0
- biostar_x_mcp_server/utils/language_detector.py +51 -0
- biostar_x_mcp_server/utils/search.py +42 -0
- biostar_x_mcp_server/utils/timezone.py +122 -0
- suprema_biostar_mcp-1.0.1.dist-info/METADATA +163 -0
- suprema_biostar_mcp-1.0.1.dist-info/RECORD +61 -0
- suprema_biostar_mcp-1.0.1.dist-info/WHEEL +4 -0
- suprema_biostar_mcp-1.0.1.dist-info/entry_points.txt +2 -0
- suprema_biostar_mcp-1.0.1.dist-info/licenses/LICENSE +21 -0
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"""Utility functions for BioStar X MCP Server"""
|
|
2
|
+
|
|
3
|
+
from .timezone import (
|
|
4
|
+
TIMEZONE_OFFSETS,
|
|
5
|
+
get_timezone_offset,
|
|
6
|
+
get_timezone_string,
|
|
7
|
+
convert_utc_to_local,
|
|
8
|
+
)
|
|
9
|
+
from .search import search_users_by_name
|
|
10
|
+
from .decorators import (
|
|
11
|
+
handle_api_errors,
|
|
12
|
+
require_auth,
|
|
13
|
+
log_execution,
|
|
14
|
+
validate_args,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
# Timezone utilities
|
|
19
|
+
"TIMEZONE_OFFSETS",
|
|
20
|
+
"get_timezone_offset",
|
|
21
|
+
"get_timezone_string",
|
|
22
|
+
"convert_utc_to_local",
|
|
23
|
+
# Search utilities
|
|
24
|
+
"search_users_by_name",
|
|
25
|
+
# Decorators
|
|
26
|
+
"handle_api_errors",
|
|
27
|
+
"require_auth",
|
|
28
|
+
"log_execution",
|
|
29
|
+
"validate_args",
|
|
30
|
+
]
|
|
31
|
+
|
|
@@ -0,0 +1,206 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Category Mapper Module
|
|
3
|
+
|
|
4
|
+
Maps documents and endpoints to BioStar MCP Server categories based on content and structure.
|
|
5
|
+
"""
|
|
6
|
+
import logging
|
|
7
|
+
import re
|
|
8
|
+
from typing import Dict, List, Optional, Set, Any
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
|
|
11
|
+
logger = logging.getLogger(__name__)
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class CategoryMapper:
    """Maps content to BioStar MCP Server categories.

    Classification uses three signals, in decreasing reliability:
    explicit API path prefixes (ENDPOINT_CATEGORY_MAP), endpoint tags,
    and keyword matches in free text (CATEGORY_KEYWORDS).
    """

    # Category keywords mapping (Korean + English)
    # NOTE(review): several keywords appear under more than one category
    # (e.g. "credential" in auth and cards, "entry"/"exit" in doors and
    # occupancy, "log" in events). In the reverse index built by __init__,
    # the category listed LATER in this dict silently wins for such
    # keywords — confirm this is intended before reordering entries.
    CATEGORY_KEYWORDS: Dict[str, List[str]] = {
        "auth": [
            "authentication", "login", "logout", "session", "credential", "token",
            "인증", "로그인", "로그아웃", "세션", "토큰", "인증서"
        ],
        "users": [
            "user", "users", "person", "people", "employee", "staff", "member",
            "사용자", "유저", "직원", "인원", "멤버", "회원",
            "user group", "user management", "사용자 그룹", "사용자 관리"
        ],
        "cards": [
            "card", "cards", "credential", "credentials", "badge", "smartcard",
            "카드", "크리덴셜", "배지", "스마트카드",
            "card type", "wiegand", "csn", "카드 타입", "위간드"
        ],
        "doors": [
            "door", "doors", "gate", "gates", "entry", "entrance", "exit",
            "문", "도어", "출입문", "게이트", "입구", "출구",
            "door group", "door control", "도어 그룹", "도어 제어"
        ],
        "access": [
            "access", "permission", "authorization", "access level", "access group",
            "출입", "권한", "액세스", "접근", "권한 그룹", "접근 레벨",
            "access control", "permission management", "접근 제어", "권한 관리"
        ],
        "devices": [
            "device", "devices", "reader", "readers", "terminal", "controller",
            "장치", "디바이스", "리더", "터미널", "컨트롤러",
            "device group", "device management", "장치 그룹", "장치 관리"
        ],
        "events": [
            "event", "events", "log", "logs", "record", "records", "history",
            "이벤트", "로그", "기록", "이력", "히스토리",
            "access log", "event log", "출입 기록", "이벤트 로그"
        ],
        "audit": [
            "audit", "audits", "trail", "tracking", "change history",
            "감사", "추적", "변경 이력", "감사 추적",
            "audit log", "audit trail", "감사 로그"
        ],
        "navigation": [
            "navigation", "navigate", "page", "route",
            "네비게이션", "페이지", "이동"
        ],
        "occupancy": [
            "occupancy", "occupied", "entry", "exit", "inside", "present",
            "재실", "입실", "퇴실", "재실 현황", "재실자"
        ],
        "files": [
            "file", "files", "upload", "import", "export", "csv", "pdf",
            "파일", "업로드", "임포트", "내보내기"
        ],
        "logs": [
            "server log", "system log", "log file", "server status",
            "서버 로그", "시스템 로그", "로그 파일", "서버 상태"
        ],
    }

    # API endpoint path to category mapping
    # Matched by case-insensitive prefix in map_endpoint_to_category.
    ENDPOINT_CATEGORY_MAP: Dict[str, str] = {
        "/api/auth": "auth",
        "/api/users": "users",
        "/api/user-groups": "users",
        "/api/cards": "cards",
        "/api/card-types": "cards",
        "/api/doors": "doors",
        "/api/door-groups": "doors",
        "/api/access": "access",
        "/api/access-groups": "access",
        "/api/access-levels": "access",
        "/api/devices": "devices",
        "/api/device-groups": "devices",
        "/api/events": "events",
        "/api/audit": "audit",
    }

    def __init__(self):
        """Initialize CategoryMapper"""
        # Build reverse keyword index for faster lookup.
        # Iteration is in CATEGORY_KEYWORDS insertion order, so a keyword
        # shared by two categories resolves to the later category (see the
        # NOTE on CATEGORY_KEYWORDS above).
        self._keyword_to_category: Dict[str, str] = {}
        for category, keywords in self.CATEGORY_KEYWORDS.items():
            for keyword in keywords:
                self._keyword_to_category[keyword.lower()] = category

    def map_endpoint_to_category(self, endpoint: Dict[str, Any]) -> Optional[str]:
        """
        Map API endpoint to category based on path and tags

        Matching order: known path prefixes, then tags, then the first
        non-"api" path segment, and finally any keyword occurring anywhere
        in the path (least reliable — substring match).

        Args:
            endpoint: Endpoint information with path, method, tags, etc.

        Returns:
            Category name or None if not found
        """
        path = endpoint.get("path", "").lower()
        tags = endpoint.get("tags", [])

        # Check path mapping first
        for endpoint_pattern, category in self.ENDPOINT_CATEGORY_MAP.items():
            if path.startswith(endpoint_pattern.lower()):
                return category

        # Check tags
        # NOTE(review): assumes every tag is a string — confirm against the
        # producer of the endpoint dicts.
        for tag in tags:
            tag_lower = tag.lower()
            if tag_lower in self._keyword_to_category:
                return self._keyword_to_category[tag_lower]

        # Check path segments
        path_segments = [seg for seg in path.split("/") if seg and seg != "api"]
        if path_segments:
            first_segment = path_segments[0]
            if first_segment in self._keyword_to_category:
                return self._keyword_to_category[first_segment]

        # Default: try to infer from path
        # Plain substring scan — e.g. "log" matches "/api/catalogs";
        # first keyword in index order wins.
        for keyword, category in self._keyword_to_category.items():
            if keyword in path:
                return category

        return None

    def map_text_to_category(self, text: str, title: Optional[str] = None) -> Optional[str]:
        """
        Map text content to category based on keywords

        Each keyword scores +3 when found in the title and +1 when found in
        the text; the category with the highest total wins. Ties resolve to
        the category declared first in CATEGORY_KEYWORDS (dict order of max).

        Args:
            text: Text content to analyze
            title: Optional title (has higher weight)

        Returns:
            Category name or None if not found
        """
        if not text:
            return None

        text_lower = text.lower()
        title_lower = title.lower() if title else ""

        # Score categories based on keyword matches
        category_scores: Dict[str, int] = {}

        for category, keywords in self.CATEGORY_KEYWORDS.items():
            score = 0
            for keyword in keywords:
                keyword_lower = keyword.lower()
                # Title matches have higher weight
                if title_lower and keyword_lower in title_lower:
                    score += 3
                # Text matches
                if keyword_lower in text_lower:
                    score += 1

            if score > 0:
                category_scores[category] = score

        if not category_scores:
            return None

        # Return category with highest score
        return max(category_scores.items(), key=lambda x: x[1])[0]

    def map_section_to_category(self, section: Dict[str, Any]) -> Optional[str]:
        """
        Map PDF section to category

        Args:
            section: Section information with title and content

        Returns:
            Category name or None if not found
        """
        title = section.get("title", "")
        content = section.get("content", "")

        # Try title first (more reliable)
        # Passing the title as both text and title means a title keyword
        # scores 3 + 1 = 4 here.
        category = self.map_text_to_category(title, title=title)
        if category:
            return category

        # Try content (first 500 chars for performance)
        content_preview = content[:500] if content else ""
        category = self.map_text_to_category(content_preview, title=title)

        return category

    def get_all_categories(self) -> List[str]:
        """Get list of all available categories"""
        return list(self.CATEGORY_KEYWORDS.keys())
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Decorator utilities for BioStar X MCP Server handlers
|
|
3
|
+
Provides common decorators for error handling, authentication, and logging
|
|
4
|
+
"""
|
|
5
|
+
import logging
|
|
6
|
+
import functools
|
|
7
|
+
from typing import Callable, Any
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def handle_api_errors(func: Callable) -> Callable:
    """
    Wrap an async handler method so any raised exception is logged and
    converted into the handler's standard error payload.

    On failure the wrapper delegates to ``self.handle_api_error``, so every
    handler reports API errors in a uniform shape.

    Usage:
        @handle_api_errors
        async def my_handler_method(self, args):
            # Your code here
            pass
    """
    @functools.wraps(func)
    async def _guarded(self, *call_args, **call_kwargs):
        try:
            return await func(self, *call_args, **call_kwargs)
        except Exception as exc:
            logger.error(f"Error in {func.__name__}: {exc}", exc_info=True)
            return await self.handle_api_error(exc)

    return _guarded
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def require_auth(func: Callable) -> Callable:
    """
    Ensure authentication before the wrapped handler coroutine runs.

    ``self.check_auth()`` is invoked first; it is expected to raise when no
    valid session exists, aborting the call before *func* executes.

    Usage:
        @require_auth
        @handle_api_errors
        async def my_handler_method(self, args):
            # Your code here (auth is guaranteed)
            pass
    """
    @functools.wraps(func)
    async def _authed(self, *call_args, **call_kwargs):
        self.check_auth()
        return await func(self, *call_args, **call_kwargs)

    return _authed
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def log_execution(func: Callable) -> Callable:
    """
    Log the start, completion, and failure of the wrapped coroutine.

    Exceptions are logged at ERROR level and re-raised unchanged, so this
    decorator never alters the handler's control flow.

    Usage:
        @log_execution
        @require_auth
        @handle_api_errors
        async def my_handler_method(self, args):
            # Your code here
            pass
    """
    @functools.wraps(func)
    async def _traced(self, *call_args, **call_kwargs):
        logger.debug(f"Starting execution: {func.__name__}")
        try:
            outcome = await func(self, *call_args, **call_kwargs)
        except Exception as exc:
            logger.error(f"Failed execution: {func.__name__} - {exc}")
            raise
        logger.debug(f"Completed execution: {func.__name__}")
        return outcome

    return _traced
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def validate_args(*required_keys: str):
    """
    Validate that required keys are present in the handler's args dict.

    When any key is absent the wrapped coroutine is never called; instead
    the handler's standard ``error_response`` payload is returned, listing
    both the missing and the full required key sets.

    Usage:
        @validate_args("user_id", "name")
        @require_auth
        @handle_api_errors
        async def my_handler_method(self, args):
            # user_id and name are guaranteed to exist in args
            pass

    Args:
        required_keys: Keys that must exist in args dict
    """
    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        async def _checked(self, args: dict, *rest, **kwargs):
            absent = [key for key in required_keys if key not in args]
            if absent:
                return self.error_response(
                    "Missing required parameters",
                    {"missing": absent, "required": list(required_keys)},
                )
            return await func(self, args, *rest, **kwargs)

        return _checked

    return decorator
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Language detection utility for determining user's preferred language.
|
|
3
|
+
"""
|
|
4
|
+
import re
|
|
5
|
+
from typing import Literal
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def detect_language(text: str) -> Literal["ko", "en"]:
    """
    Detect if text is primarily Korean or English.

    Counts Hangul syllables (U+AC00..U+D7A3) versus Latin letters; anything
    else (digits, punctuation, whitespace) is ignored. Korean is reported
    when Hangul makes up more than 20% of the counted characters, since
    even partially-Korean input usually indicates a Korean-speaking user.

    Args:
        text: Input text to analyze

    Returns:
        "ko" if Korean, "en" if English (default)
    """
    # Non-strings and blank input default to English.
    if not isinstance(text, str):
        return "en"
    stripped = text.strip()
    if not stripped:
        return "en"

    hangul_count = sum(1 for ch in stripped if '가' <= ch <= '힣')
    latin_count = sum(1 for ch in stripped if ch.isascii() and ch.isalpha())

    meaningful = hangul_count + latin_count
    if meaningful == 0:
        # No alphabetic content at all (e.g. only digits/punctuation).
        return "en"

    return "ko" if hangul_count / meaningful > 0.2 else "en"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def is_korean(text: str) -> bool:
    """Return True when *text* is detected as primarily Korean."""
    detected = detect_language(text)
    return detected == "ko"
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import httpx
|
|
2
|
+
from typing import List, Dict
|
|
3
|
+
import os
|
|
4
|
+
|
|
5
|
+
API_BASE = os.getenv("API_BASE", "https://192.168.120.114:443/api")
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
async def search_users_by_name(session_token: str, name: str) -> List[Dict]:
    """
    Search for users by name using the BioStar API.

    CRITICAL: BioStar API's name parameter seems unreliable, so we do client-side filtering
    to ensure exact or partial matches only.

    Args:
        session_token: Value for the ``bs-session-id`` header of an authenticated session.
        name: Search term, matched case-insensitively as a substring of each user name.

    Returns:
        List of ``{"user_id", "name"}`` dicts for matching users. Returns an
        empty list on any non-200 response (best-effort: HTTP errors are not
        raised).
    """
    headers = {
        "bs-session-id": session_token,
        "Content-Type": "application/json"
    }

    # SECURITY NOTE(review): verify=False disables TLS certificate
    # verification. Presumably needed for self-signed BioStar appliances on
    # a trusted LAN, but it should be configurable rather than hard-coded.
    async with httpx.AsyncClient(verify=False) as client:
        response = await client.get(
            f"{API_BASE}/users",
            headers=headers,
            params={"name": name}
        )

        if response.status_code != 200:
            return []

        data = response.json()
        all_users = data.get("UserCollection", {}).get("rows", [])

        # Client-side filtering: only return users whose name contains the
        # search term (case-insensitive). Use .get() throughout so a row
        # missing "user_id" or "name" cannot raise KeyError (the original
        # filtered with .get() but then indexed the same keys directly).
        search_lower = name.lower().strip()
        return [
            {"user_id": u.get("user_id"), "name": u.get("name", "")}
            for u in all_users
            if search_lower in u.get("name", "").lower()
        ]
|
|
@@ -0,0 +1,122 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Timezone utility functions
|
|
3
|
+
Convert BioStar X API timezone IDs to UTC offsets (in minutes)
|
|
4
|
+
"""
|
|
5
|
+
from typing import Optional
|
|
6
|
+
from datetime import datetime, timedelta
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
# BioStar Timezone ID to UTC offset mapping (in minutes)
TIMEZONE_OFFSETS = {
    "0": -720,   # (UTC -12:00) Eniwetok, Kwajalein
    "1": -660,   # (UTC -11:00) Midway Island, Samoa
    "2": -600,   # (UTC -10:00) Hawaii
    "3": -540,   # (UTC -9:00) Alaska
    "4": -480,   # (UTC -8:00) Pacific Time (US & Canada)
    "5": -420,   # (UTC -7:00) Mountain Time (US & Canada)
    "6": -360,   # (UTC -6:00) Central Time (US & Canada), Mexico City
    "7": -300,   # (UTC -5:00) Eastern Time (US & Canada), Bogota, Lima
    "8": -240,   # (UTC -4:00) Atlantic Time (Canada), Caracas, La Paz
    "9": -210,   # (UTC -3:30) Newfoundland
    "10": -180,  # (UTC -3:00) Brazil, Buenos Aires, Georgetown
    "11": -120,  # (UTC -2:00) Mid-Atlantic
    "12": -60,   # (UTC -1:00) Azores, Cape Verde Islands
    "13": 0,     # (UTC) Western Europe Time, London, Lisbon, Casablanca
    "14": 60,    # (UTC +1:00) Brussels, Copenhagen, Madrid, Paris
    "15": 120,   # (UTC +2:00) Kaliningrad, South Africa
    "16": 180,   # (UTC +3:00) Baghdad, Riyadh, Moscow, St. Petersburg
    "17": 210,   # (UTC +3:30) Tehran
    "18": 240,   # (UTC +4:00) Abu Dhabi, Muscat, Baku, Tbilisi
    "19": 270,   # (UTC +4:30) Kabul
    "20": 300,   # (UTC +5:00) Ekaterinburg, Islamabad, Karachi, Tashkent
    "21": 330,   # (UTC +5:30) Bombay, Calcutta, Madras, New Delhi, Colombo
    "22": 345,   # (UTC +5:45) Kathmandu
    "23": 360,   # (UTC +6:00) Almaty, Dhaka
    "24": 420,   # (UTC +7:00) Bangkok, Hanoi, Jakarta
    "25": 480,   # (UTC +8:00) Beijing, Perth, Singapore, Hong Kong
    "26": 540,   # (UTC +9:00) Seoul, Tokyo, Osaka, Sapporo, Yakutsk
    "27": 570,   # (UTC +9:30) Adelaide, Darwin
    "28": 600,   # (UTC +10:00) Eastern Australia, Guam, Vladivostok
    "29": 660,   # (UTC +11:00) Magadan, Solomon Islands, New Caledonia
    "30": 720,   # (UTC +12:00) Auckland, Wellington, Fiji, Kamchatka
}


def _format_utc_offset(offset_minutes: int) -> str:
    """
    Format a UTC offset in minutes as "UTC+9:00" / "UTC-3:30".

    Fixes a sign bug in the previous per-call formatting: offsets in
    (-60, 0) minutes truncated to hour 0 and printed without a sign
    (e.g. -30 -> "UTC0:30" instead of "UTC-0:30"). Shared by
    get_timezone_string and convert_utc_to_local so both agree.
    """
    sign = "+" if offset_minutes >= 0 else "-"
    hours, minutes = divmod(abs(offset_minutes), 60)
    return f"UTC{sign}{hours}:{minutes:02d}"


def get_timezone_offset(timezone_id: str, default: int = 540) -> int:
    """
    Convert BioStar timezone ID to UTC offset in minutes

    Args:
        timezone_id: BioStar timezone ID (string; non-strings are coerced with str())
        default: Default offset to return if timezone_id not found (default: 540 = UTC+9 Seoul)

    Returns:
        UTC offset in minutes
    """
    return TIMEZONE_OFFSETS.get(str(timezone_id), default)


def get_timezone_string(timezone_id: str, default: int = 540) -> str:
    """
    Convert BioStar timezone ID to "UTC+9:00" format string

    Args:
        timezone_id: BioStar timezone ID (string)
        default: Default offset to use if timezone_id not found (in minutes)

    Returns:
        Timezone string in "UTC+9:00" format (negative offsets as "UTC-3:30")
    """
    return _format_utc_offset(get_timezone_offset(timezone_id, default))


def convert_utc_to_local(utc_time_str: str, timezone_offset_minutes: int) -> str:
    """
    Convert UTC time string to local time

    Args:
        utc_time_str: ISO 8601 UTC time string
            (e.g., "2024-01-15T10:30:00.00Z" or "2025-01-20T00:00:08.00+00:00")
        timezone_offset_minutes: Timezone offset in minutes (e.g., 540 = UTC+9)

    Returns:
        Local time string in "2024-01-15 19:30:00 (UTC+9:00)" format, or the
        original input unchanged when it cannot be parsed (best-effort).
    """
    try:
        # Normalize time string
        time_str = utc_time_str.strip()

        # Handle "Z" (Zulu/UTC) suffix for pre-3.11 fromisoformat
        if time_str.endswith('Z'):
            time_str = time_str[:-1] + '+00:00'

        # Fix millisecond format: .00 -> .000
        # (fromisoformat on Python < 3.11 requires 3 or 6 fractional digits;
        # BioStar emits 2)
        if '.00+' in time_str or '.00-' in time_str:
            time_str = time_str.replace('.00+', '.000+').replace('.00-', '.000-')

        # Parse UTC time and shift into the requested local offset
        dt_utc = datetime.fromisoformat(time_str)
        dt_local = dt_utc + timedelta(minutes=timezone_offset_minutes)

        return f"{dt_local.strftime('%Y-%m-%d %H:%M:%S')} ({_format_utc_offset(timezone_offset_minutes)})"

    except (AttributeError, TypeError, ValueError):
        # Best-effort: return original on conversion failure
        # (AttributeError/TypeError cover non-string input).
        return utc_time_str
|