vnai 2.1.9__py3-none-any.whl → 2.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +193 -33
- vnai/beam/__init__.py +26 -3
- vnai/beam/auth.py +312 -0
- vnai/beam/fundamental.py +168 -0
- vnai/beam/patching.py +223 -0
- vnai/beam/quota.py +114 -44
- vnai/beam/sync.py +87 -0
- vnai/flow/relay.py +18 -12
- vnai/scope/__init__.py +8 -1
- vnai/scope/device.py +315 -0
- vnai/scope/lc_integration.py +351 -0
- vnai/scope/license.py +197 -0
- vnai/scope/profile.py +37 -17
- vnai/scope/promo.py +203 -107
- {vnai-2.1.9.dist-info → vnai-2.3.7.dist-info}/METADATA +3 -2
- vnai-2.3.7.dist-info/RECORD +23 -0
- {vnai-2.1.9.dist-info → vnai-2.3.7.dist-info}/WHEEL +1 -1
- vnai-2.1.9.dist-info/RECORD +0 -16
- {vnai-2.1.9.dist-info → vnai-2.3.7.dist-info}/top_level.txt +0 -0
vnai/beam/sync.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Backend Integration Layer
|
|
3
|
+
Integrates vnai with the backend vnstock API for:
|
|
4
|
+
- Real-time quota verification
|
|
5
|
+
- Device management
|
|
6
|
+
- Add-on package tracking
|
|
7
|
+
- Offline fallback support
|
|
8
|
+
"""
|
|
9
|
+
import logging
|
|
10
|
+
import requests
|
|
11
|
+
from typing import Dict, Any, Optional
|
|
12
|
+
from datetime import datetime
|
|
13
|
+
log = logging.getLogger(__name__)
|
|
14
|
+
|
|
15
|
+
class BackendIntegration:
    """Thin HTTP client for the vnstock backend API (quota, devices, add-ons).

    Every public method returns a plain dict; transport and HTTP failures are
    reported as ``{"success": False, "error": ...}`` rather than raised.
    """

    def __init__(self, backend_url: str = "https://vnstocks.com/api/vnstock"):
        # Base URL; endpoint paths are appended verbatim.
        self.backend_url = backend_url
        self.timeout = 5  # seconds per HTTP request
        self.cache = {}

    def _make_request(self, endpoint: str, api_key: str, method: str = "GET") -> Dict[str, Any]:
        """Issue one authenticated request and normalize the outcome.

        Args:
            endpoint: Path appended to ``backend_url`` (e.g. ``/quota/status``).
            api_key: Bearer token for the ``Authorization`` header.
            method: Only ``GET`` is implemented; anything else yields an
                error dict without touching the network.
        """
        try:
            if method != "GET":
                return {"success": False, "error": f"Unsupported method: {method}"}
            auth_headers = {
                "Authorization": f"Bearer {api_key}",
                "Content-Type": "application/json",
            }
            response = requests.get(
                f"{self.backend_url}{endpoint}",
                headers=auth_headers,
                timeout=self.timeout,
            )
            if response.status_code == 200:
                # Backend responses are JSON documents; returned as-is.
                return response.json()
            known_errors = {
                401: "Unauthorized - Invalid API key",
                404: "Resource not found",
            }
            fallback = f"HTTP {response.status_code}"
            return {"success": False, "error": known_errors.get(response.status_code, fallback)}
        except requests.exceptions.Timeout:
            log.warning(f"Backend API timeout: {endpoint}")
            return {"success": False, "error": "Backend API timeout"}
        except requests.exceptions.ConnectionError:
            log.warning(f"Backend API connection error: {endpoint}")
            return {"success": False, "error": "Backend API connection error"}
        except Exception as e:
            # Catch-all keeps callers crash-free (offline-fallback design).
            log.warning(f"Backend API error: {e}")
            return {"success": False, "error": str(e)}

    def get_quota_status(self, api_key: str) -> Dict[str, Any]:
        """Fetch current quota usage/limits for this API key."""
        return self._make_request("/quota/status", api_key)

    def get_devices(self, api_key: str) -> Dict[str, Any]:
        """List devices registered under this API key."""
        return self._make_request("/devices", api_key)

    def get_device_limits(self, api_key: str) -> Dict[str, Any]:
        """Fetch device-count limits for this API key."""
        return self._make_request("/devices/limits", api_key)

    def get_device(self, api_key: str, device_id: str) -> Dict[str, Any]:
        """Fetch one device record by its id."""
        return self._make_request(f"/devices/{device_id}", api_key)

    def get_active_addons(self, api_key: str) -> Dict[str, Any]:
        """List currently active add-on packages for this API key."""
        return self._make_request("/addons/active", api_key)

    def get_complete_metadata(self, api_key: str) -> Dict[str, Any]:
        """Aggregate quota, device and add-on info into one response.

        Sections that fail individually are reported as ``None`` so a partial
        backend outage still yields a usable result.
        """
        try:
            sections = {
                "quota": self.get_quota_status(api_key),
                "devices": self.get_devices(api_key),
                "addons": self.get_active_addons(api_key),
            }
            combined = {
                name: (resp.get("data") if resp.get("success") else None)
                for name, resp in sections.items()
            }
            combined["timestamp"] = datetime.now().isoformat()
            return {"success": True, "data": combined}
        except Exception as e:
            log.warning(f"Failed to get complete metadata: {e}")
            return {"success": False, "error": str(e)}
|
|
82
|
+
# Shared module-level client, reused by default.
backend_integration = BackendIntegration()


def get_backend_integration(backend_url: Optional[str] = None) -> BackendIntegration:
    """Return the shared client, or a fresh one bound to *backend_url*.

    A falsy *backend_url* (None or empty string) yields the module singleton.
    """
    if not backend_url:
        return backend_integration
    return BackendIntegration(backend_url)
|
vnai/flow/relay.py
CHANGED
|
@@ -30,20 +30,26 @@ class Conduit:
|
|
|
30
30
|
self.last_sync_time = time.time()
|
|
31
31
|
self.sync_count = 0
|
|
32
32
|
self.failed_queue = []
|
|
33
|
-
self.
|
|
34
|
-
self.project_dir =
|
|
35
|
-
self.project_dir.mkdir(exist_ok=True)
|
|
33
|
+
self.project_dir = self._get_project_dir()
|
|
34
|
+
self.project_dir.mkdir(parents=True, exist_ok=True)
|
|
36
35
|
self.data_dir = self.project_dir /'data'
|
|
37
|
-
self.data_dir.mkdir(exist_ok=True)
|
|
36
|
+
self.data_dir.mkdir(parents=True, exist_ok=True)
|
|
38
37
|
self.config_path = self.data_dir /"relay_config.json"
|
|
39
38
|
try:
|
|
40
39
|
from vnai.scope.profile import inspector
|
|
41
40
|
self.machine_id = inspector.fingerprint()
|
|
42
|
-
except:
|
|
41
|
+
except Exception:
|
|
43
42
|
self.machine_id = self._generate_fallback_id()
|
|
44
43
|
self._load_config()
|
|
45
44
|
self._start_periodic_sync()
|
|
46
45
|
|
|
46
|
+
def _get_project_dir(self) -> Path:
|
|
47
|
+
try:
|
|
48
|
+
from vnstock.core.config.ggcolab import get_vnstock_directory
|
|
49
|
+
return get_vnstock_directory()
|
|
50
|
+
except ImportError:
|
|
51
|
+
return Path.home() /".vnstock"
|
|
52
|
+
|
|
47
53
|
def _generate_fallback_id(self) -> str:
|
|
48
54
|
try:
|
|
49
55
|
import platform
|
|
@@ -245,13 +251,14 @@ class Conduit:
|
|
|
245
251
|
self.last_sync_time = time.time()
|
|
246
252
|
self.sync_count += 1
|
|
247
253
|
self._save_config()
|
|
254
|
+
machine_id = self.machine_id
|
|
248
255
|
try:
|
|
249
|
-
from vnai.scope.
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
256
|
+
from vnai.scope.device import device_registry
|
|
257
|
+
cached_id = device_registry.get_device_id()
|
|
258
|
+
if cached_id:
|
|
259
|
+
machine_id = cached_id
|
|
260
|
+
except Exception:
|
|
261
|
+
pass
|
|
255
262
|
payload = {
|
|
256
263
|
"analytics_data": data_to_send,
|
|
257
264
|
"metadata": {
|
|
@@ -259,7 +266,6 @@ class Conduit:
|
|
|
259
266
|
"machine_id": machine_id,
|
|
260
267
|
"sync_count": self.sync_count,
|
|
261
268
|
"trigger_reason": reason,
|
|
262
|
-
"environment": environment_info,
|
|
263
269
|
"data_counts": {
|
|
264
270
|
"function_calls": len(data_to_send["function_calls"]),
|
|
265
271
|
"api_requests": len(data_to_send["api_requests"]),
|
vnai/scope/__init__.py
CHANGED
|
@@ -1,4 +1,11 @@
|
|
|
1
1
|
from vnai.scope.profile import inspector
|
|
2
2
|
from vnai.scope.state import tracker, record
|
|
3
3
|
from vnai.scope.promo import manager as content_manager
|
|
4
|
-
from vnai.scope.promo import present as present_content
|
|
4
|
+
from vnai.scope.promo import present as present_content
|
|
5
|
+
from vnai.scope.lc_integration import (
|
|
6
|
+
api_key_checker,
|
|
7
|
+
check_license_status,
|
|
8
|
+
update_license_from_vnii,
|
|
9
|
+
check_license_via_api_key,
|
|
10
|
+
is_paid_user_via_api_key
|
|
11
|
+
)
|
vnai/scope/device.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Device registration and management with IDE detection.
|
|
3
|
+
Handles one-time device registration on first install or version update.
|
|
4
|
+
Subsequent operations use cached device_id to avoid expensive system scans.
|
|
5
|
+
Includes IDE environment detection for comprehensive device profiling.
|
|
6
|
+
"""
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import psutil
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
import logging
|
|
13
|
+
from typing import Optional, Tuple, Dict, List, Any
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
class IDEDetector:
    """Best-effort detection of the IDE / execution environment.

    Detection combines three signals, in priority order: hosted-notebook
    environment variables, an active IPython/Jupyter kernel, and markers in
    the process ancestry. All methods are static; the class is a namespace.
    """

    # Substrings matched (case-insensitively) against ancestor process
    # names/executables. Values are the display names reported to callers.
    IDE_MARKERS = {
        'pycharm':'PyCharm',
        'idea':'IntelliJ IDEA',
        'webstorm':'WebStorm',
        'code':'VS Code',
        'code helper':'VS Code',
        'code-server':'VS Code Server',
        'code-oss':'VS Code OSS',
        'vscodium':'VSCodium',
        'cursor':'Cursor',
        'cursor helper':'Cursor',
        'windsurf':'Windsurf',
        'windsurf helper':'Windsurf',
        'jupyter-lab':'Jupyter Lab',
        'jupyter-notebook':'Classic Jupyter',
        'jupyter-server':'Jupyter Server',
        'ipython':'IPython',
        'docker-init':'Docker Container',
        'node':'Node.js',
    }
    # Environment variables that unambiguously identify hosted notebooks.
    ENV_MARKERS = {
        'COLAB_GPU':'Google Colab',
        'COLAB_RELEASE_TAG':'Google Colab',
        'KAGGLE_KERNEL_RUN_TYPE':'Kaggle Notebook',
        'JUPYTERHUB_SERVICE_PREFIX':'JupyterHub',
    }
    # Process names treated as plain shells (→ "Terminal").
    SHELL_MARKERS = {
        'zsh','bash','fish','sh','ksh','tcsh',
        'cmd','powershell','pwsh','commandline'
    }

    @staticmethod
    def get_process_chain(max_depth: int = 20) -> List[Dict]:
        """Return this process plus up to *max_depth* ancestors.

        Each entry is ``{'pid', 'name', 'exe', 'depth'}``; inaccessible
        ancestors are recorded with a placeholder name. Returns ``[]`` on
        any unexpected failure.
        """
        try:
            current = psutil.Process(os.getpid())
            links = [{
                'pid': current.pid,
                'name': current.name(),
                'exe': current.exe(),
                'depth': 0,
            }]
            for level, parent in enumerate(current.parents(), start=1):
                if level > max_depth:
                    break
                try:
                    entry = {
                        'pid': parent.pid,
                        'name': parent.name(),
                        'exe': parent.exe(),
                        'depth': level,
                    }
                except (psutil.NoSuchProcess, psutil.AccessDenied):
                    # Keep the pid/depth so the chain stays contiguous.
                    entry = {
                        'pid': parent.pid,
                        'name':'<access_denied or terminated>',
                        'exe': None,
                        'depth': level,
                    }
                links.append(entry)
            return links
        except Exception as exc:
            logger.debug(f"Error getting process chain: {exc}")
            return []

    @staticmethod
    def _check_jupyter_environment() -> Optional[str]:
        """Classify the running IPython shell, or None when not in IPython."""
        try:
            from IPython import get_ipython
            shell = get_ipython()
            if shell is None:
                return None
            shell_cls = type(shell).__name__
            if'ZMQInteractiveShell' in shell_cls:
                return'Jupyter Kernel'
            if'TerminalInteractiveShell' in shell_cls:
                return'IPython Terminal'
            return'IPython'
        except (ImportError, AttributeError):
            return None

    @staticmethod
    def detect_ide() -> Tuple[str, Dict]:
        """Identify the hosting IDE/environment.

        Returns ``(display_name, details)`` where ``details`` records how the
        match was made and when.
        """
        # 1) Hosted notebooks advertise themselves via env vars.
        for env_var, ide_name in IDEDetector.ENV_MARKERS.items():
            if env_var in os.environ:
                return ide_name, {
                    'detection_method':'environment_variable',
                    'env_var': env_var,
                    'detected_at': datetime.now().isoformat(),
                }
        # 2) Inside a Jupyter kernel, look for an IDE frontend in the ancestry.
        if IDEDetector._check_jupyter_environment() =='Jupyter Kernel':
            for entry in IDEDetector.get_process_chain():
                proc_name = (entry['name'] or"").lower()
                for marker, ide_name in IDEDetector.IDE_MARKERS.items():
                    if marker in proc_name and marker not in ('docker-init','node'):
                        return f"{ide_name} (Jupyter)", {
                            'detection_method':'jupyter_with_ide_frontend',
                            'frontend': ide_name,
                            'detected_at': datetime.now().isoformat(),
                        }
            return'Jupyter Kernel', {
                'detection_method':'ipython_kernel',
                'detected_at': datetime.now().isoformat(),
            }
        # 3) Otherwise scan the process ancestry for IDE markers.
        chain = IDEDetector.get_process_chain()
        if not chain:
            return'Unknown', {
                'detection_method':'failed_to_get_chain',
                'detected_at': datetime.now().isoformat(),
            }
        for entry in chain:
            proc_name = (entry['name'] or"").lower()
            proc_exe = (entry['exe'] or"").lower()
            for marker, ide_name in IDEDetector.IDE_MARKERS.items():
                if marker not in proc_name and marker not in proc_exe:
                    continue
                if marker in ('node','docker-init'):
                    # Generic runtimes only count when Jupyter appears in the
                    # ancestry — otherwise they are false positives.
                    chain_names = [p['name'].lower() for p in chain]
                    if not any('jupyter' in n for n in chain_names):
                        continue
                return ide_name, {
                    'detection_method':'process_chain',
                    'matched_process': entry['name'],
                    'depth': entry['depth'],
                    'detected_at': datetime.now().isoformat(),
                }
        # 4) A shell as the immediate process means a plain terminal session.
        if any(sh in chain[0]['name'].lower() for sh in IDEDetector.SHELL_MARKERS):
            return'Terminal', {
                'detection_method':'shell_detected',
                'shell_name': chain[0]['name'],
                'detected_at': datetime.now().isoformat(),
            }
        return'Unknown', {
            'detection_method':'no_match',
            'detected_at': datetime.now().isoformat(),
        }

    @staticmethod
    def get_ide_info() -> Dict:
        """Flatten :meth:`detect_ide` output into a single summary dict."""
        name, details = IDEDetector.detect_ide()
        return {
            'ide_name': name,
            'detection_method': details.get('detection_method'),
            'detected_at': details.get('detected_at'),
            'process_chain_depth': details.get('depth'),
            'matched_process': details.get('matched_process'),
            'environment_variable': details.get('env_var'),
            'frontend': details.get('frontend'),
            'shell_name': details.get('shell_name'),
        }
|
|
172
|
+
|
|
173
|
+
def detect_current_ide() -> Tuple[str, Dict]:
    """Module-level convenience wrapper for :meth:`IDEDetector.detect_ide`."""
    return IDEDetector.detect_ide()


def get_current_ide_info() -> Dict:
    """Module-level convenience wrapper for :meth:`IDEDetector.get_ide_info`."""
    return IDEDetector.get_ide_info()
|
|
178
|
+
|
|
179
|
+
class DeviceRegistry:
|
|
180
|
+
_instance = None
|
|
181
|
+
_lock = None
|
|
182
|
+
|
|
183
|
+
def __new__(cls, project_dir: str | None = None):
|
|
184
|
+
import threading
|
|
185
|
+
if cls._lock is None:
|
|
186
|
+
cls._lock = threading.Lock()
|
|
187
|
+
with cls._lock:
|
|
188
|
+
if cls._instance is None:
|
|
189
|
+
cls._instance = super(DeviceRegistry, cls).__new__(cls)
|
|
190
|
+
cls._instance._initialize(project_dir)
|
|
191
|
+
return cls._instance
|
|
192
|
+
|
|
193
|
+
def _initialize(self, project_dir: str | None = None) -> None:
|
|
194
|
+
if project_dir is None:
|
|
195
|
+
project_dir = Path.home() /".vnstock"
|
|
196
|
+
else:
|
|
197
|
+
project_dir = Path(project_dir)
|
|
198
|
+
self.id_dir = project_dir /'id'
|
|
199
|
+
self.registry_file = self.id_dir /'hw_info.json'
|
|
200
|
+
old_registry_file = self.id_dir /'device_registry.json'
|
|
201
|
+
if old_registry_file.exists() and not self.registry_file.exists():
|
|
202
|
+
try:
|
|
203
|
+
old_registry_file.rename(self.registry_file)
|
|
204
|
+
logger.info("Migrated device_registry.json → hw_info.json")
|
|
205
|
+
except Exception as e:
|
|
206
|
+
logger.warning(f"Failed to migrate registry file: {e}")
|
|
207
|
+
self.id_dir.mkdir(exist_ok=True, parents=True)
|
|
208
|
+
self._registry = None
|
|
209
|
+
if self.registry_file.exists():
|
|
210
|
+
try:
|
|
211
|
+
with open(self.registry_file,'r', encoding='utf-8') as f:
|
|
212
|
+
self._registry = json.load(f)
|
|
213
|
+
except Exception as e:
|
|
214
|
+
logger.warning(f"Failed to load device registry: {e}")
|
|
215
|
+
self._registry = None
|
|
216
|
+
|
|
217
|
+
def is_registered(self, version: str) -> bool:
|
|
218
|
+
if self._registry is None:
|
|
219
|
+
return False
|
|
220
|
+
try:
|
|
221
|
+
installed_version = self._registry.get('version_installed')
|
|
222
|
+
return installed_version == version
|
|
223
|
+
except Exception:
|
|
224
|
+
return False
|
|
225
|
+
|
|
226
|
+
def register(
|
|
227
|
+
self,
|
|
228
|
+
device_info: dict,
|
|
229
|
+
version: str,
|
|
230
|
+
ide_info: dict = None
|
|
231
|
+
) -> dict:
|
|
232
|
+
if ide_info is None:
|
|
233
|
+
try:
|
|
234
|
+
ide_info = get_current_ide_info()
|
|
235
|
+
except Exception:
|
|
236
|
+
ide_info = {}
|
|
237
|
+
registry = {
|
|
238
|
+
"device_id": device_info.get('machine_id'),
|
|
239
|
+
"register_date": datetime.now().isoformat(),
|
|
240
|
+
"version_installed": version,
|
|
241
|
+
"os": device_info.get('os_name'),
|
|
242
|
+
"os_platform": device_info.get('platform'),
|
|
243
|
+
"python": device_info.get('python_version'),
|
|
244
|
+
"arch": (
|
|
245
|
+
device_info.get('platform','').split('-')[-1]
|
|
246
|
+
if device_info.get('platform') else'unknown'
|
|
247
|
+
),
|
|
248
|
+
"cpu_count": device_info.get('cpu_count'),
|
|
249
|
+
"memory_gb": device_info.get('memory_gb'),
|
|
250
|
+
"environment": device_info.get('environment'),
|
|
251
|
+
"hosting_service": device_info.get('hosting_service'),
|
|
252
|
+
"ide": ide_info,
|
|
253
|
+
"reference_data": {
|
|
254
|
+
"commercial_usage": device_info.get(
|
|
255
|
+
'commercial_usage'
|
|
256
|
+
),
|
|
257
|
+
"packages_snapshot": (
|
|
258
|
+
device_info.get('dependencies', {}).get(
|
|
259
|
+
'vnstock_family', []
|
|
260
|
+
)
|
|
261
|
+
),
|
|
262
|
+
"git_info": device_info.get('git_info')
|
|
263
|
+
}
|
|
264
|
+
}
|
|
265
|
+
try:
|
|
266
|
+
with open(self.registry_file,'w', encoding='utf-8') as f:
|
|
267
|
+
json.dump(registry, f, indent=2)
|
|
268
|
+
self._registry = registry
|
|
269
|
+
logger.info(
|
|
270
|
+
f"Device registered: {device_info.get('machine_id')} "
|
|
271
|
+
f"(version {version})"
|
|
272
|
+
)
|
|
273
|
+
except Exception as e:
|
|
274
|
+
logger.error(f"Failed to register device: {e}")
|
|
275
|
+
raise
|
|
276
|
+
return registry
|
|
277
|
+
|
|
278
|
+
def get_device_id(self) -> str | None:
|
|
279
|
+
if self._registry is None:
|
|
280
|
+
return None
|
|
281
|
+
try:
|
|
282
|
+
return self._registry.get('device_id')
|
|
283
|
+
except Exception:
|
|
284
|
+
return None
|
|
285
|
+
|
|
286
|
+
def get_registry(self) -> dict | None:
|
|
287
|
+
return self._registry
|
|
288
|
+
|
|
289
|
+
def get_register_date(self) -> str | None:
|
|
290
|
+
if self._registry is None:
|
|
291
|
+
return None
|
|
292
|
+
try:
|
|
293
|
+
return self._registry.get('register_date')
|
|
294
|
+
except Exception:
|
|
295
|
+
return None
|
|
296
|
+
|
|
297
|
+
def needs_reregistration(self, current_version: str) -> bool:
|
|
298
|
+
if self._registry is None:
|
|
299
|
+
return True
|
|
300
|
+
try:
|
|
301
|
+
installed_version = self._registry.get('version_installed')
|
|
302
|
+
return installed_version != current_version
|
|
303
|
+
except Exception:
|
|
304
|
+
return True
|
|
305
|
+
|
|
306
|
+
def update_version(self, new_version: str) -> None:
|
|
307
|
+
if self._registry is not None:
|
|
308
|
+
self._registry['version_installed'] = new_version
|
|
309
|
+
self._registry['last_version_update'] = datetime.now().isoformat()
|
|
310
|
+
try:
|
|
311
|
+
with open(self.registry_file,'w', encoding='utf-8') as f:
|
|
312
|
+
json.dump(self._registry, f, indent=2)
|
|
313
|
+
except Exception as e:
|
|
314
|
+
logger.warning(f"Failed to update version in registry: {e}")
|
|
315
|
+
# Module-wide singleton; importing code should use this shared instance
# (rooted at the default ~/.vnstock directory) rather than constructing
# DeviceRegistry directly.
device_registry = DeviceRegistry()
|