vnai 0.1.3__py3-none-any.whl → 2.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +304 -292
- vnai/beam/__init__.py +26 -0
- vnai/beam/auth.py +312 -0
- vnai/beam/fundamental.py +168 -0
- vnai/beam/metrics.py +167 -0
- vnai/beam/patching.py +223 -0
- vnai/beam/pulse.py +79 -0
- vnai/beam/quota.py +403 -0
- vnai/beam/sync.py +87 -0
- vnai/flow/__init__.py +2 -0
- vnai/flow/queue.py +100 -0
- vnai/flow/relay.py +347 -0
- vnai/scope/__init__.py +11 -0
- vnai/scope/device.py +315 -0
- vnai/scope/lc_integration.py +351 -0
- vnai/scope/license.py +197 -0
- vnai/scope/profile.py +599 -0
- vnai/scope/promo.py +389 -0
- vnai/scope/state.py +155 -0
- vnai-2.3.7.dist-info/METADATA +21 -0
- vnai-2.3.7.dist-info/RECORD +23 -0
- {vnai-0.1.3.dist-info → vnai-2.3.7.dist-info}/WHEEL +1 -1
- vnai-0.1.3.dist-info/METADATA +0 -20
- vnai-0.1.3.dist-info/RECORD +0 -5
- {vnai-0.1.3.dist-info → vnai-2.3.7.dist-info}/top_level.txt +0 -0
vnai/flow/relay.py
ADDED
|
@@ -0,0 +1,347 @@
|
|
|
1
|
+
import time
|
|
2
|
+
import threading
|
|
3
|
+
import json
|
|
4
|
+
import random
|
|
5
|
+
import requests
|
|
6
|
+
from datetime import datetime
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Dict, List, Any, Optional
|
|
9
|
+
|
|
10
|
+
class Conduit:
    """Singleton telemetry buffer: batches records and ships them upstream."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls, buffer_size=50, sync_interval=300):
        # First construction is serialised by the class lock; later calls
        # return the existing singleton and ignore the arguments.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Conduit, cls).__new__(cls)
                cls._instance._initialize(buffer_size, sync_interval)
            return cls._instance
|
|
20
|
+
|
|
21
|
+
def _initialize(self, buffer_size, sync_interval):
    """Set up buffers, locks, storage paths and the background sync thread."""
    self.buffer_size = buffer_size
    self.sync_interval = sync_interval
    # One list per record category; dispatch() drains all three at once.
    self.buffer = {"function_calls": [], "api_requests": [], "rate_limits": []}
    self.lock = threading.Lock()
    self.last_sync_time = time.time()
    self.sync_count = 0
    self.failed_queue = []
    self.project_dir = self._get_project_dir()
    self.project_dir.mkdir(parents=True, exist_ok=True)
    self.data_dir = self.project_dir / 'data'
    self.data_dir.mkdir(parents=True, exist_ok=True)
    self.config_path = self.data_dir / "relay_config.json"
    try:
        from vnai.scope.profile import inspector
        self.machine_id = inspector.fingerprint()
    except Exception:
        # Profiler unavailable -> derive a best-effort local identifier.
        self.machine_id = self._generate_fallback_id()
    self._load_config()
    self._start_periodic_sync()
|
|
45
|
+
|
|
46
|
+
def _get_project_dir(self) -> Path:
|
|
47
|
+
try:
|
|
48
|
+
from vnstock.core.config.ggcolab import get_vnstock_directory
|
|
49
|
+
return get_vnstock_directory()
|
|
50
|
+
except ImportError:
|
|
51
|
+
return Path.home() /".vnstock"
|
|
52
|
+
|
|
53
|
+
def _generate_fallback_id(self) -> str:
|
|
54
|
+
try:
|
|
55
|
+
import platform
|
|
56
|
+
import hashlib
|
|
57
|
+
import uuid
|
|
58
|
+
system_info = platform.node() + platform.platform() + platform.processor()
|
|
59
|
+
return hashlib.md5(system_info.encode()).hexdigest()
|
|
60
|
+
except:
|
|
61
|
+
import uuid
|
|
62
|
+
return str(uuid.uuid4())
|
|
63
|
+
|
|
64
|
+
def _load_config(self):
|
|
65
|
+
if self.config_path.exists():
|
|
66
|
+
try:
|
|
67
|
+
with open(self.config_path,'r') as f:
|
|
68
|
+
config = json.load(f)
|
|
69
|
+
if'buffer_size' in config:
|
|
70
|
+
self.buffer_size = config['buffer_size']
|
|
71
|
+
if'sync_interval' in config:
|
|
72
|
+
self.sync_interval = config['sync_interval']
|
|
73
|
+
if'last_sync_time' in config:
|
|
74
|
+
self.last_sync_time = config['last_sync_time']
|
|
75
|
+
if'sync_count' in config:
|
|
76
|
+
self.sync_count = config['sync_count']
|
|
77
|
+
except:
|
|
78
|
+
pass
|
|
79
|
+
|
|
80
|
+
def _save_config(self):
|
|
81
|
+
config = {
|
|
82
|
+
'buffer_size': self.buffer_size,
|
|
83
|
+
'sync_interval': self.sync_interval,
|
|
84
|
+
'last_sync_time': self.last_sync_time,
|
|
85
|
+
'sync_count': self.sync_count
|
|
86
|
+
}
|
|
87
|
+
try:
|
|
88
|
+
with open(self.config_path,'w') as f:
|
|
89
|
+
json.dump(config, f)
|
|
90
|
+
except:
|
|
91
|
+
pass
|
|
92
|
+
|
|
93
|
+
def _start_periodic_sync(self):
|
|
94
|
+
def periodic_sync():
|
|
95
|
+
while True:
|
|
96
|
+
time.sleep(self.sync_interval)
|
|
97
|
+
self.dispatch("periodic")
|
|
98
|
+
sync_thread = threading.Thread(target=periodic_sync, daemon=True)
|
|
99
|
+
sync_thread.start()
|
|
100
|
+
|
|
101
|
+
def add_function_call(self, record):
    """Buffer one function-call record (coerced to a dict) and re-check triggers."""
    payload = record if isinstance(record, dict) else {"value": str(record)}
    with self.lock:
        self.buffer["function_calls"].append(payload)
        self._check_triggers("function_calls")
|
|
107
|
+
|
|
108
|
+
def add_api_request(self, record):
    """Buffer one API-request record (coerced to a dict) and re-check triggers."""
    payload = record if isinstance(record, dict) else {"value": str(record)}
    with self.lock:
        self.buffer["api_requests"].append(payload)
        self._check_triggers("api_requests")
|
|
114
|
+
|
|
115
|
+
def add_rate_limit(self, record):
    """Buffer one rate-limit record (coerced to a dict) and re-check triggers."""
    payload = record if isinstance(record, dict) else {"value": str(record)}
    with self.lock:
        self.buffer["rate_limits"].append(payload)
        self._check_triggers("rate_limits")
|
|
121
|
+
|
|
122
|
+
def _check_triggers(self, record_type: str):
|
|
123
|
+
current_time = time.time()
|
|
124
|
+
should_trigger = False
|
|
125
|
+
trigger_reason = None
|
|
126
|
+
total_records = sum(len(buffer) for buffer in self.buffer.values())
|
|
127
|
+
if total_records >= self.buffer_size:
|
|
128
|
+
should_trigger = True
|
|
129
|
+
trigger_reason ="buffer_full"
|
|
130
|
+
elif record_type =="rate_limits" and self.buffer["rate_limits"] and any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
|
|
131
|
+
should_trigger = True
|
|
132
|
+
trigger_reason ="rate_limit_exceeded"
|
|
133
|
+
elif record_type =="function_calls" and self.buffer["function_calls"] and any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
|
|
134
|
+
should_trigger = True
|
|
135
|
+
trigger_reason ="function_error"
|
|
136
|
+
else:
|
|
137
|
+
time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
|
|
138
|
+
if random.random() < 0.05 * time_factor:
|
|
139
|
+
should_trigger = True
|
|
140
|
+
trigger_reason ="random_time_weighted"
|
|
141
|
+
if should_trigger:
|
|
142
|
+
threading.Thread(
|
|
143
|
+
target=self.dispatch,
|
|
144
|
+
args=(trigger_reason,),
|
|
145
|
+
daemon=True
|
|
146
|
+
).start()
|
|
147
|
+
|
|
148
|
+
def queue(self, package, priority=None):
    """Route one incoming package into the appropriate buffer.

    Records are tagged with a user segment ("paid"/"free"); packages with a
    "type" field are fanned out to the matching add_* buffer. priority="high"
    forces an immediate dispatch. Returns True when something was queued,
    False for an empty/falsy package.

    Fixes: removed the dead base64 API-key decode that was computed (and
    never used) every time a dict package lacked a "segment" key — it
    duplicated the ingest key already embedded in _send_data.
    """
    try:
        from vnai.scope.promo import ContentManager
        segment_val = "paid" if ContentManager().is_paid_user else "free"
    except Exception:
        segment_val = "free"

    def ensure_segment(d):
        # Return a copy of dict d carrying a "segment" key; non-dicts pass through.
        if not isinstance(d, dict):
            return d
        d = dict(d)
        if "segment" not in d:
            d["segment"] = segment_val
        return d

    if isinstance(package, dict) and "segment" not in package:
        package["segment"] = segment_val
    if isinstance(package, dict) and isinstance(package.get("data"), dict):
        if "segment" not in package["data"]:
            package["data"]["segment"] = segment_val
    if not package:
        return False
    if not isinstance(package, dict):
        self.add_function_call(ensure_segment({"message": str(package)}))
        return True
    if "timestamp" not in package:
        package["timestamp"] = datetime.now().isoformat()
    if "type" in package:
        package_type = package["type"]
        data = package.get("data", {})
        if isinstance(data, dict) and "system" in data:
            # Flatten the nested system block down to just machine_id.
            machine_id = data["system"].get("machine_id")
            data.pop("system")
            if machine_id:
                data["machine_id"] = machine_id
        if package_type == "function":
            self.add_function_call(ensure_segment(data))
        elif package_type == "api_request":
            self.add_api_request(ensure_segment(data))
        elif package_type == "rate_limit":
            self.add_rate_limit(ensure_segment(data))
        elif package_type == "system_info":
            # System snapshots are stored as a trimmed function-call record.
            self.add_function_call({
                "type": "system_info",
                "commercial": data.get("commercial"),
                "packages": data.get("packages"),
                "timestamp": package.get("timestamp"),
            })
        elif package_type == "metrics":
            # A metrics bundle carries lists keyed by metric type.
            for metric_type, metrics_list in data.items():
                if not isinstance(metrics_list, list):
                    continue
                if metric_type == "function":
                    for item in metrics_list:
                        self.add_function_call(ensure_segment(item))
                elif metric_type == "rate_limit":
                    for item in metrics_list:
                        self.add_rate_limit(ensure_segment(item))
                elif metric_type == "request":
                    for item in metrics_list:
                        self.add_api_request(ensure_segment(item))
        else:
            # Unknown type: prefer the inner data dict when it is distinct.
            if isinstance(data, dict) and data is not package:
                self.add_function_call(ensure_segment(data))
            else:
                self.add_function_call(ensure_segment(package))
    else:
        self.add_function_call(ensure_segment(package))
    if priority == "high":
        self.dispatch("high_priority")
    return True
|
|
222
|
+
|
|
223
|
+
def _send_data(self, payload):
    """POST one payload to the analytics endpoint; True iff HTTP 200."""
    import base64
    # Obfuscated ingest key shipped with the client (not a user secret).
    api_key = base64.b64decode("MXlJOEtnYXJudFFyMHB0cmlzZUhoYjRrZG9ta2VueU5JOFZQaXlrNWFvVQ==").decode()
    endpoint = "https://hq.vnstocks.com/analytics"
    try:
        resp = requests.post(
            endpoint,
            json=payload,
            headers={"x-api-key": api_key, "Content-Type": "application/json"},
            timeout=5,
        )
        return resp.status_code == 200
    except Exception:
        # Any network/transport failure is reported as "not sent".
        return False
|
|
236
|
+
|
|
237
|
+
def dispatch(self, reason="manual"):
    """Flush buffered records upstream and retry previously failed payloads.

    Returns False when there was nothing to send; otherwise the number of
    queued failed payloads that were successfully re-sent this round.
    """
    with self.lock:
        if all(len(records) == 0 for records in self.buffer.values()):
            return False
        # Snapshot and reset the buffers while holding the lock.
        data_to_send = {key: records.copy() for key, records in self.buffer.items()}
        self.buffer = {"function_calls": [], "api_requests": [], "rate_limits": []}
        self.last_sync_time = time.time()
        self.sync_count += 1
        self._save_config()
    machine_id = self.machine_id
    try:
        # Prefer the registered device id when available.
        from vnai.scope.device import device_registry
        cached_id = device_registry.get_device_id()
        if cached_id:
            machine_id = cached_id
    except Exception:
        pass
    payload = {
        "analytics_data": data_to_send,
        "metadata": {
            "timestamp": datetime.now().isoformat(),
            "machine_id": machine_id,
            "sync_count": self.sync_count,
            "trigger_reason": reason,
            "data_counts": {key: len(records) for key, records in data_to_send.items()},
        },
    }
    if not self._send_data(payload):
        with self.lock:
            self.failed_queue.append(payload)
            # Cap the retry backlog at the 10 most recent payloads.
            if len(self.failed_queue) > 10:
                self.failed_queue = self.failed_queue[-10:]
    with self.lock:
        to_retry = self.failed_queue.copy()
        self.failed_queue = []
    retried_ok = 0
    for pending in to_retry:
        if self._send_data(pending):
            retried_ok += 1
        else:
            with self.lock:
                self.failed_queue.append(pending)
    return retried_ok
|
|
293
|
+
conduit = Conduit()
|
|
294
|
+
|
|
295
|
+
def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
    """Record a single function invocation into the global conduit buffer.

    Only scalar argument values are kept; other values are reduced to their
    type name so the payload stays JSON-serialisable.
    """
    record = {
        "function": function_name,
        "source": source,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "success": success,
    }
    if error:
        record["error"] = error
    if args:
        if isinstance(args, dict):
            sanitized = {
                key: value if isinstance(value, (str, int, float, bool)) else str(type(value))
                for key, value in args.items()
            }
        else:
            sanitized = {"value": str(args)}
        record["args"] = sanitized
    conduit.add_function_call(record)
|
|
317
|
+
|
|
318
|
+
def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
    """Record a rate-limit observation, including percentage utilisation."""
    # Guard against zero/negative limits before computing the percentage.
    pct = (current_usage / limit_value) * 100 if limit_value > 0 else 0
    conduit.add_rate_limit({
        "source": source,
        "limit_type": limit_type,
        "limit_value": limit_value,
        "current_usage": current_usage,
        "is_exceeded": is_exceeded,
        "timestamp": datetime.now().isoformat(),
        "usage_percentage": pct,
    })
|
|
329
|
+
|
|
330
|
+
def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
    """Record one API round-trip into the global conduit buffer."""
    conduit.add_api_request({
        "endpoint": endpoint,
        "source": source,
        "method": method,
        "status_code": status_code,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "request_size": request_size,
        "response_size": response_size,
    })
|
|
342
|
+
|
|
343
|
+
def sync_now():
    """Force an immediate flush of all buffered telemetry."""
    return conduit.dispatch("manual")
|
|
345
|
+
|
|
346
|
+
def retry_failed():
    """Retry payloads that previously failed to upload.

    Bug fix: the original delegated to ``conduit.retry_failed()``, a method
    that does not exist on :class:`Conduit`, so every call raised
    AttributeError. The retry logic mirrors dispatch()'s failed-queue
    handling. Returns the number of payloads successfully re-sent.
    """
    with conduit.lock:
        to_retry = conduit.failed_queue.copy()
        conduit.failed_queue = []
    success_count = 0
    for payload in to_retry:
        if conduit._send_data(payload):
            success_count += 1
        else:
            # Keep still-failing payloads for a later attempt.
            with conduit.lock:
                conduit.failed_queue.append(payload)
    return success_count
|
vnai/scope/__init__.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
from vnai.scope.profile import inspector
|
|
2
|
+
from vnai.scope.state import tracker, record
|
|
3
|
+
from vnai.scope.promo import manager as content_manager
|
|
4
|
+
from vnai.scope.promo import present as present_content
|
|
5
|
+
from vnai.scope.lc_integration import (
|
|
6
|
+
api_key_checker,
|
|
7
|
+
check_license_status,
|
|
8
|
+
update_license_from_vnii,
|
|
9
|
+
check_license_via_api_key,
|
|
10
|
+
is_paid_user_via_api_key
|
|
11
|
+
)
|
vnai/scope/device.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Device registration and management with IDE detection.
|
|
3
|
+
Handles one-time device registration on first install or version update.
|
|
4
|
+
Subsequent operations use cached device_id to avoid expensive system scans.
|
|
5
|
+
Includes IDE environment detection for comprehensive device profiling.
|
|
6
|
+
"""
|
|
7
|
+
import json
|
|
8
|
+
import os
|
|
9
|
+
import psutil
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from datetime import datetime
|
|
12
|
+
import logging
|
|
13
|
+
from typing import Optional, Tuple, Dict, List, Any
|
|
14
|
+
logger = logging.getLogger(__name__)
|
|
15
|
+
|
|
16
|
+
class IDEDetector:
    """Heuristics for identifying the IDE or runtime hosting this process."""

    # Process-name fragments mapped to a human-readable IDE label.
    IDE_MARKERS = {
        'pycharm': 'PyCharm',
        'idea': 'IntelliJ IDEA',
        'webstorm': 'WebStorm',
        'code': 'VS Code',
        'code helper': 'VS Code',
        'code-server': 'VS Code Server',
        'code-oss': 'VS Code OSS',
        'vscodium': 'VSCodium',
        'cursor': 'Cursor',
        'cursor helper': 'Cursor',
        'windsurf': 'Windsurf',
        'windsurf helper': 'Windsurf',
        'jupyter-lab': 'Jupyter Lab',
        'jupyter-notebook': 'Classic Jupyter',
        'jupyter-server': 'Jupyter Server',
        'ipython': 'IPython',
        'docker-init': 'Docker Container',
        'node': 'Node.js',
    }
    # Environment variables that positively identify hosted notebook services.
    ENV_MARKERS = {
        'COLAB_GPU': 'Google Colab',
        'COLAB_RELEASE_TAG': 'Google Colab',
        'KAGGLE_KERNEL_RUN_TYPE': 'Kaggle Notebook',
        'JUPYTERHUB_SERVICE_PREFIX': 'JupyterHub',
    }
    # Shell process names; matching one of these means "plain terminal".
    SHELL_MARKERS = {
        'zsh', 'bash', 'fish', 'sh', 'ksh', 'tcsh',
        'cmd', 'powershell', 'pwsh', 'commandline',
    }
|
|
47
|
+
@staticmethod
def get_process_chain(max_depth: int = 20) -> List[Dict]:
    """Return this process plus up to ``max_depth`` ancestors.

    Each entry holds pid/name/exe/depth. Ancestors that vanish or deny
    access are recorded with a placeholder name so the chain's shape is
    preserved. Returns [] when the chain cannot be read at all.
    """
    try:
        me = psutil.Process(os.getpid())
        chain = [{'pid': me.pid, 'name': me.name(), 'exe': me.exe(), 'depth': 0}]
        for depth, ancestor in enumerate(me.parents(), start=1):
            if depth > max_depth:
                break
            try:
                chain.append({
                    'pid': ancestor.pid,
                    'name': ancestor.name(),
                    'exe': ancestor.exe(),
                    'depth': depth,
                })
            except (psutil.NoSuchProcess, psutil.AccessDenied):
                chain.append({
                    'pid': ancestor.pid,
                    'name': '<access_denied or terminated>',
                    'exe': None,
                    'depth': depth,
                })
        return chain
    except Exception as e:
        logger.debug(f"Error getting process chain: {e}")
        return []
|
|
80
|
+
@staticmethod
|
|
81
|
+
|
|
82
|
+
def _check_jupyter_environment() -> Optional[str]:
|
|
83
|
+
try:
|
|
84
|
+
from IPython import get_ipython
|
|
85
|
+
ipython = get_ipython()
|
|
86
|
+
if ipython is None:
|
|
87
|
+
return None
|
|
88
|
+
kernel_type = type(ipython).__name__
|
|
89
|
+
if'ZMQInteractiveShell' in kernel_type:
|
|
90
|
+
return'Jupyter Kernel'
|
|
91
|
+
elif'TerminalInteractiveShell' in kernel_type:
|
|
92
|
+
return'IPython Terminal'
|
|
93
|
+
else:
|
|
94
|
+
return'IPython'
|
|
95
|
+
except (ImportError, AttributeError):
|
|
96
|
+
return None
|
|
97
|
+
@staticmethod
|
|
98
|
+
|
|
99
|
+
def detect_ide() -> Tuple[str, Dict]:
|
|
100
|
+
for env_var, ide_name in IDEDetector.ENV_MARKERS.items():
|
|
101
|
+
if env_var in os.environ:
|
|
102
|
+
return ide_name, {
|
|
103
|
+
'detection_method':'environment_variable',
|
|
104
|
+
'env_var': env_var,
|
|
105
|
+
'detected_at': datetime.now().isoformat(),
|
|
106
|
+
}
|
|
107
|
+
jupyter_env = IDEDetector._check_jupyter_environment()
|
|
108
|
+
if jupyter_env and jupyter_env =='Jupyter Kernel':
|
|
109
|
+
chain = IDEDetector.get_process_chain()
|
|
110
|
+
for process_info in chain:
|
|
111
|
+
name = (process_info['name'] or"").lower()
|
|
112
|
+
for marker, ide_name in IDEDetector.IDE_MARKERS.items():
|
|
113
|
+
if marker in name and marker not in ['docker-init','node']:
|
|
114
|
+
ide_display =f"{ide_name} (Jupyter)"
|
|
115
|
+
return ide_display, {
|
|
116
|
+
'detection_method':'jupyter_with_ide_frontend',
|
|
117
|
+
'frontend': ide_name,
|
|
118
|
+
'detected_at': datetime.now().isoformat(),
|
|
119
|
+
}
|
|
120
|
+
kernel_name ='Jupyter Kernel'
|
|
121
|
+
return kernel_name, {
|
|
122
|
+
'detection_method':'ipython_kernel',
|
|
123
|
+
'detected_at': datetime.now().isoformat(),
|
|
124
|
+
}
|
|
125
|
+
chain = IDEDetector.get_process_chain()
|
|
126
|
+
if not chain:
|
|
127
|
+
return'Unknown', {
|
|
128
|
+
'detection_method':'failed_to_get_chain',
|
|
129
|
+
'detected_at': datetime.now().isoformat(),
|
|
130
|
+
}
|
|
131
|
+
for process_info in chain:
|
|
132
|
+
name = (process_info['name'] or"").lower()
|
|
133
|
+
exe = (process_info['exe'] or"").lower()
|
|
134
|
+
for marker, ide_name in IDEDetector.IDE_MARKERS.items():
|
|
135
|
+
if marker in name or marker in exe:
|
|
136
|
+
if marker in ['node','docker-init']:
|
|
137
|
+
chain_names = [p['name'].lower() for p in chain]
|
|
138
|
+
if not any('jupyter' in n for n in chain_names):
|
|
139
|
+
continue
|
|
140
|
+
return ide_name, {
|
|
141
|
+
'detection_method':'process_chain',
|
|
142
|
+
'matched_process': process_info['name'],
|
|
143
|
+
'depth': process_info['depth'],
|
|
144
|
+
'detected_at': datetime.now().isoformat(),
|
|
145
|
+
}
|
|
146
|
+
if chain:
|
|
147
|
+
first_process_name = chain[0]['name'].lower()
|
|
148
|
+
if any(sh in first_process_name for sh in IDEDetector.SHELL_MARKERS):
|
|
149
|
+
return'Terminal', {
|
|
150
|
+
'detection_method':'shell_detected',
|
|
151
|
+
'shell_name': chain[0]['name'],
|
|
152
|
+
'detected_at': datetime.now().isoformat(),
|
|
153
|
+
}
|
|
154
|
+
return'Unknown', {
|
|
155
|
+
'detection_method':'no_match',
|
|
156
|
+
'detected_at': datetime.now().isoformat(),
|
|
157
|
+
}
|
|
158
|
+
@staticmethod
|
|
159
|
+
|
|
160
|
+
def get_ide_info() -> Dict:
|
|
161
|
+
ide_name, detection_info = IDEDetector.detect_ide()
|
|
162
|
+
return {
|
|
163
|
+
'ide_name': ide_name,
|
|
164
|
+
'detection_method': detection_info.get('detection_method'),
|
|
165
|
+
'detected_at': detection_info.get('detected_at'),
|
|
166
|
+
'process_chain_depth': detection_info.get('depth'),
|
|
167
|
+
'matched_process': detection_info.get('matched_process'),
|
|
168
|
+
'environment_variable': detection_info.get('env_var'),
|
|
169
|
+
'frontend': detection_info.get('frontend'),
|
|
170
|
+
'shell_name': detection_info.get('shell_name'),
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
def detect_current_ide() -> Tuple[str, Dict]:
    """Module-level convenience wrapper around IDEDetector.detect_ide()."""
    return IDEDetector.detect_ide()
|
|
175
|
+
|
|
176
|
+
def get_current_ide_info() -> Dict:
    """Module-level convenience wrapper around IDEDetector.get_ide_info()."""
    return IDEDetector.get_ide_info()
|
|
178
|
+
|
|
179
|
+
import threading

class DeviceRegistry:
    """Singleton cache of the one-time device registration record."""

    _instance = None
    # Fix: the original created this lock lazily inside __new__
    # ("if cls._lock is None: cls._lock = threading.Lock()"), which is
    # itself a race — two threads could each install a different lock and
    # both enter the critical section. Creating it at class-definition
    # time removes the race entirely.
    _lock = threading.Lock()

    def __new__(cls, project_dir: str | None = None):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(DeviceRegistry, cls).__new__(cls)
                cls._instance._initialize(project_dir)
            return cls._instance
|
|
192
|
+
|
|
193
|
+
def _initialize(self, project_dir: str | None = None) -> None:
|
|
194
|
+
if project_dir is None:
|
|
195
|
+
project_dir = Path.home() /".vnstock"
|
|
196
|
+
else:
|
|
197
|
+
project_dir = Path(project_dir)
|
|
198
|
+
self.id_dir = project_dir /'id'
|
|
199
|
+
self.registry_file = self.id_dir /'hw_info.json'
|
|
200
|
+
old_registry_file = self.id_dir /'device_registry.json'
|
|
201
|
+
if old_registry_file.exists() and not self.registry_file.exists():
|
|
202
|
+
try:
|
|
203
|
+
old_registry_file.rename(self.registry_file)
|
|
204
|
+
logger.info("Migrated device_registry.json → hw_info.json")
|
|
205
|
+
except Exception as e:
|
|
206
|
+
logger.warning(f"Failed to migrate registry file: {e}")
|
|
207
|
+
self.id_dir.mkdir(exist_ok=True, parents=True)
|
|
208
|
+
self._registry = None
|
|
209
|
+
if self.registry_file.exists():
|
|
210
|
+
try:
|
|
211
|
+
with open(self.registry_file,'r', encoding='utf-8') as f:
|
|
212
|
+
self._registry = json.load(f)
|
|
213
|
+
except Exception as e:
|
|
214
|
+
logger.warning(f"Failed to load device registry: {e}")
|
|
215
|
+
self._registry = None
|
|
216
|
+
|
|
217
|
+
def is_registered(self, version: str) -> bool:
    """True when the cached registry records exactly this installed version."""
    if self._registry is None:
        return False
    try:
        return self._registry.get('version_installed') == version
    except Exception:
        return False
|
|
225
|
+
|
|
226
|
+
def register(
    self,
    device_info: dict,
    version: str,
    ide_info: dict = None
) -> dict:
    """Write a fresh hw_info.json from ``device_info`` and cache it.

    ``ide_info`` defaults to a live IDE detection; detection failures
    degrade to an empty dict. Re-raises when the registry file cannot be
    written. Returns the registry record that was persisted.
    """
    if ide_info is None:
        try:
            ide_info = get_current_ide_info()
        except Exception:
            ide_info = {}
    platform_str = device_info.get('platform')
    registry = {
        "device_id": device_info.get('machine_id'),
        "register_date": datetime.now().isoformat(),
        "version_installed": version,
        "os": device_info.get('os_name'),
        "os_platform": platform_str,
        "python": device_info.get('python_version'),
        # Architecture is the last dash-separated token of the platform string.
        "arch": platform_str.split('-')[-1] if platform_str else 'unknown',
        "cpu_count": device_info.get('cpu_count'),
        "memory_gb": device_info.get('memory_gb'),
        "environment": device_info.get('environment'),
        "hosting_service": device_info.get('hosting_service'),
        "ide": ide_info,
        "reference_data": {
            "commercial_usage": device_info.get('commercial_usage'),
            "packages_snapshot": device_info.get('dependencies', {}).get('vnstock_family', []),
            "git_info": device_info.get('git_info'),
        },
    }
    try:
        with open(self.registry_file, 'w', encoding='utf-8') as f:
            json.dump(registry, f, indent=2)
        self._registry = registry
        logger.info(
            f"Device registered: {device_info.get('machine_id')} "
            f"(version {version})"
        )
    except Exception as e:
        logger.error(f"Failed to register device: {e}")
        raise
    return registry
|
|
277
|
+
|
|
278
|
+
def get_device_id(self) -> str | None:
|
|
279
|
+
if self._registry is None:
|
|
280
|
+
return None
|
|
281
|
+
try:
|
|
282
|
+
return self._registry.get('device_id')
|
|
283
|
+
except Exception:
|
|
284
|
+
return None
|
|
285
|
+
|
|
286
|
+
def get_registry(self) -> dict | None:
|
|
287
|
+
return self._registry
|
|
288
|
+
|
|
289
|
+
def get_register_date(self) -> str | None:
|
|
290
|
+
if self._registry is None:
|
|
291
|
+
return None
|
|
292
|
+
try:
|
|
293
|
+
return self._registry.get('register_date')
|
|
294
|
+
except Exception:
|
|
295
|
+
return None
|
|
296
|
+
|
|
297
|
+
def needs_reregistration(self, current_version: str) -> bool:
    """True when no registry exists or the recorded version differs."""
    if self._registry is None:
        return True
    try:
        return self._registry.get('version_installed') != current_version
    except Exception:
        return True
|
|
305
|
+
|
|
306
|
+
def update_version(self, new_version: str) -> None:
    """Bump the recorded version in memory and persist it (best effort).

    No-op when nothing has been registered yet.
    """
    if self._registry is None:
        return
    self._registry['version_installed'] = new_version
    self._registry['last_version_update'] = datetime.now().isoformat()
    try:
        with open(self.registry_file, 'w', encoding='utf-8') as f:
            json.dump(self._registry, f, indent=2)
    except Exception as e:
        logger.warning(f"Failed to update version in registry: {e}")
|
|
315
|
+
device_registry = DeviceRegistry()
|