vnai 2.0.7__py3-none-any.whl → 2.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +69 -265
- vnai/beam/__init__.py +2 -5
- vnai/beam/metrics.py +62 -187
- vnai/beam/pulse.py +29 -107
- vnai/beam/quota.py +100 -482
- vnai/flow/__init__.py +2 -5
- vnai/flow/queue.py +52 -131
- vnai/flow/relay.py +158 -465
- vnai/scope/__init__.py +2 -5
- vnai/scope/profile.py +219 -762
- vnai/scope/promo.py +97 -366
- vnai/scope/state.py +71 -220
- {vnai-2.0.7.dist-info → vnai-2.0.9.dist-info}/METADATA +1 -1
- vnai-2.0.9.dist-info/RECORD +16 -0
- vnai-2.0.7.dist-info/RECORD +0 -16
- {vnai-2.0.7.dist-info → vnai-2.0.9.dist-info}/WHEEL +0 -0
- {vnai-2.0.7.dist-info → vnai-2.0.9.dist-info}/top_level.txt +0 -0
vnai/flow/relay.py
CHANGED
@@ -1,468 +1,161 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
|
4
|
-
|
5
|
-
|
6
|
-
|
7
|
-
|
8
|
-
|
1
|
+
# --- Minifier-generated constant aliases -------------------------------------
# The release pipeline replaces repeated literals with short module-level
# names.  Each alias below maps back to the literal it stands for; the rest of
# the module (Conduit and the track_* bridges) references these names.
_U='execution_time'
_T='manual'
_S='success'
_R='is_exceeded'
_Q='source'
_P='function'
_O='last_sync_time'
_N='sync_interval'
_M='buffer_size'
_L='webhook_url'
_K='value'
_J='sync_count'
_I='machine_id'
_H='data'
_G=False
_F=None
_E='timestamp'
_D='api_requests'
_C='rate_limits'
_B='function_calls'
_A=True
import time,threading,json,random,requests
from datetime import datetime
from pathlib import Path
from typing import Dict,List,Any,Optional
|
13
26
|
class Conduit:
    """Buffers telemetry records (function calls, API requests, rate-limit
    checks) and ships them to a configured webhook.

    Process-wide singleton: every ``Conduit(...)`` call returns the same
    instance; constructor arguments are honoured only on first creation.
    Dispatch is triggered by buffer size, error events, a time-weighted
    random check, a periodic background thread, or explicitly.
    """

    _instance = None          # singleton slot
    _lock = threading.Lock()  # guards singleton creation

    def __new__(cls, webhook_url=None, buffer_size=50, sync_interval=300):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Conduit, cls).__new__(cls)
                cls._instance._initialize(webhook_url, buffer_size, sync_interval)
            return cls._instance

    def _initialize(self, webhook_url, buffer_size, sync_interval):
        """One-time setup: buffers, config paths, machine id, sync thread."""
        self.webhook_url = webhook_url
        self.buffer_size = buffer_size      # flush threshold (total records)
        self.sync_interval = sync_interval  # seconds between periodic flushes
        self.buffer = {'function_calls': [], 'api_requests': [], 'rate_limits': []}
        self.lock = threading.Lock()        # guards self.buffer / failed_queue
        self.last_sync_time = time.time()
        self.sync_count = 0
        self.failed_queue = []              # payloads whose delivery failed
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.config_path = self.data_dir / 'relay_config.json'
        try:
            # Prefer the fingerprint from the profiling subsystem when present.
            from vnai.scope.profile import inspector
            self.machine_id = inspector.fingerprint()
        except Exception:  # narrowed from bare except: don't eat SystemExit
            self.machine_id = self._generate_fallback_id()
        self._load_config()
        self._start_periodic_sync()

    def _generate_fallback_id(self) -> str:
        """Best-effort stable machine id; random UUID as a last resort."""
        try:
            # Removed dead duplicate `import uuid` that was in this branch.
            import hashlib
            import platform
            system_info = platform.node() + platform.platform() + platform.processor()
            # md5 is acceptable here: this is an identifier, not a security hash.
            return hashlib.md5(system_info.encode()).hexdigest()
        except Exception:
            import uuid
            return str(uuid.uuid4())

    def _load_config(self):
        """Merge persisted settings from relay_config.json (best effort)."""
        if not self.config_path.exists():
            return
        try:
            with open(self.config_path, 'r') as f:
                config = json.load(f)
            # Explicit webhook_url passed at construction wins over the file.
            if not self.webhook_url and 'webhook_url' in config:
                self.webhook_url = config['webhook_url']
            if 'buffer_size' in config:
                self.buffer_size = config['buffer_size']
            if 'sync_interval' in config:
                self.sync_interval = config['sync_interval']
            if 'last_sync_time' in config:
                self.last_sync_time = config['last_sync_time']
            if 'sync_count' in config:
                self.sync_count = config['sync_count']
        except Exception:
            pass  # config is advisory; keep in-memory defaults on any error

    def _save_config(self):
        """Persist current relay settings (best effort; failures ignored)."""
        config = {
            'webhook_url': self.webhook_url,
            'buffer_size': self.buffer_size,
            'sync_interval': self.sync_interval,
            'last_sync_time': self.last_sync_time,
            'sync_count': self.sync_count,
        }
        try:
            with open(self.config_path, 'w') as f:
                json.dump(config, f)
        except Exception:
            pass

    def _start_periodic_sync(self):
        """Start a daemon thread that flushes every sync_interval seconds."""
        def periodic_sync():
            while True:
                time.sleep(self.sync_interval)
                self.dispatch('periodic')
        sync_thread = threading.Thread(target=periodic_sync, daemon=True)
        sync_thread.start()

    def add_function_call(self, record):
        """Buffer one function-call record (non-dicts are stringified)."""
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['function_calls'].append(record)
            self._check_triggers('function_calls')

    def add_api_request(self, record):
        """Buffer one API-request record (non-dicts are stringified)."""
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['api_requests'].append(record)
            self._check_triggers('api_requests')

    def add_rate_limit(self, record):
        """Buffer one rate-limit record (non-dicts are stringified)."""
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['rate_limits'].append(record)
            self._check_triggers('rate_limits')

    def _check_triggers(self, record_type: str):
        """Decide whether to flush now; runs with self.lock already held,
        so the actual dispatch happens on a separate thread."""
        current_time = time.time()
        should_trigger = False
        trigger_reason = None
        total_records = sum(len(buffer) for buffer in self.buffer.values())
        if total_records >= self.buffer_size:
            should_trigger = True
            trigger_reason = 'buffer_full'
        elif record_type == 'rate_limits' and self.buffer['rate_limits'] and \
                any(item.get('is_exceeded') for item in self.buffer['rate_limits'] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = 'rate_limit_exceeded'
        elif record_type == 'function_calls' and self.buffer['function_calls'] and \
                any(not item.get('success') for item in self.buffer['function_calls'] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = 'function_error'
        else:
            # Time-weighted random flush: up to 5% chance, growing with the
            # time elapsed since the last sync.
            time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
            if random.random() < 0.05 * time_factor:
                should_trigger = True
                trigger_reason = 'random_time_weighted'
        if should_trigger:
            threading.Thread(target=self.dispatch, args=(trigger_reason,), daemon=True).start()

    def queue(self, package, priority=None):
        """Route a telemetry package into the appropriate buffer.

        Returns True when something was queued, False for empty packages or
        a missing payload.  priority='high' forces an immediate dispatch.
        """
        # FIX: reject empty packages BEFORE any mutation.  The previous code
        # injected the 'segment' key first, which made an empty dict truthy
        # and let it through as a junk record.
        if not package:
            return False
        # Tag every payload with the user segment (paid/free); default to
        # 'free' when the promo subsystem is unavailable.
        try:
            from vnai.scope.promo import ContentManager
            segment_val = 'paid' if ContentManager().is_paid_user else 'free'
        except Exception:
            segment_val = 'free'

        def ensure_segment(d):
            # Copy so the caller's dict is not mutated by the buffered record.
            if not isinstance(d, dict):
                return d
            d = dict(d)
            if 'segment' not in d:
                d['segment'] = segment_val
            return d

        # Non-dict packages become a simple message record.
        if not isinstance(package, dict):
            self.add_function_call(ensure_segment({'message': str(package)}))
            return True
        if 'segment' not in package:
            package['segment'] = segment_val
        if isinstance(package.get('data'), dict) and 'segment' not in package['data']:
            package['data']['segment'] = segment_val
        if 'timestamp' not in package:
            package['timestamp'] = datetime.now().isoformat()
        if 'type' in package:
            package_type = package['type']
            data = package.get('data', {})
            # Collapse embedded system info to just the machine id to avoid
            # duplicating the whole environment in every record.
            if isinstance(data, dict) and 'system' in data:
                machine_id = data['system'].get('machine_id')
                data.pop('system')
                if machine_id:
                    data['machine_id'] = machine_id
            if package_type == 'function':
                self.add_function_call(ensure_segment(data))
            elif package_type == 'api_request':
                self.add_api_request(ensure_segment(data))
            elif package_type == 'rate_limit':
                self.add_rate_limit(ensure_segment(data))
            elif package_type == 'system_info':
                self.add_function_call({
                    'type': 'system_info',
                    'commercial': data.get('commercial'),
                    'packages': data.get('packages'),
                    'timestamp': package.get('timestamp'),
                })
            elif package_type == 'metrics':
                # Metrics packages carry several category lists at once.
                for metric_type, metrics_list in data.items():
                    if isinstance(metrics_list, list):
                        if metric_type == 'function':
                            for item in metrics_list:
                                self.add_function_call(ensure_segment(item))
                        elif metric_type == 'rate_limit':
                            for item in metrics_list:
                                self.add_rate_limit(ensure_segment(item))
                        elif metric_type == 'request':
                            for item in metrics_list:
                                self.add_api_request(ensure_segment(item))
            elif isinstance(data, dict) and data is not package:
                self.add_function_call(ensure_segment(data))
            else:
                self.add_function_call(ensure_segment(package))
        else:
            self.add_function_call(ensure_segment(package))
        if priority == 'high':
            self.dispatch('high_priority')
        return True

    def dispatch(self, reason='manual'):
        """Flush all buffers to the webhook.  Returns True on delivery."""
        if not self.webhook_url:
            return False
        with self.lock:
            if all(len(records) == 0 for records in self.buffer.values()):
                return False
            # Snapshot and clear under the lock so adds during the HTTP call
            # land in the fresh buffer.
            data_to_send = {
                'function_calls': self.buffer['function_calls'].copy(),
                'api_requests': self.buffer['api_requests'].copy(),
                'rate_limits': self.buffer['rate_limits'].copy(),
            }
            self.buffer = {'function_calls': [], 'api_requests': [], 'rate_limits': []}
            self.last_sync_time = time.time()
            self.sync_count += 1
            self._save_config()
        try:
            from vnai.scope.profile import inspector
            environment_info = inspector.examine()
            machine_id = environment_info.get('machine_id', self.machine_id)
        except Exception:
            environment_info = {'machine_id': self.machine_id}
            machine_id = self.machine_id
        payload = {
            'analytics_data': data_to_send,
            'metadata': {
                'timestamp': datetime.now().isoformat(),
                'machine_id': machine_id,
                'sync_count': self.sync_count,
                'trigger_reason': reason,
                'environment': environment_info,
                'data_counts': {
                    'function_calls': len(data_to_send['function_calls']),
                    'api_requests': len(data_to_send['api_requests']),
                    'rate_limits': len(data_to_send['rate_limits']),
                },
            },
        }
        success = self._send_data(payload)
        if not success:
            with self.lock:
                self.failed_queue.append(payload)
                if len(self.failed_queue) > 10:
                    # Bound the retry backlog to the 10 most recent payloads.
                    self.failed_queue = self.failed_queue[-10:]
        return success

    def _send_data(self, payload):
        """POST the payload to the webhook; True iff HTTP 200."""
        if not self.webhook_url:
            return False
        try:
            response = requests.post(self.webhook_url, json=payload, timeout=5)
            return response.status_code == 200
        except Exception:
            return False

    def retry_failed(self):
        """Re-send previously failed payloads; returns the success count."""
        if not self.failed_queue:
            return 0
        with self.lock:
            to_retry = self.failed_queue.copy()
            self.failed_queue = []
        success_count = 0
        for payload in to_retry:
            if self._send_data(payload):
                success_count += 1
            else:
                with self.lock:
                    self.failed_queue.append(payload)
        return success_count

    def configure(self, webhook_url):
        """Set (and persist) the webhook URL.  Always returns True."""
        with self.lock:
            self.webhook_url = webhook_url
            self._save_config()
            return True
|
144
|
+
# Shared module-level singleton; importing this module creates it (and its
# periodic background sync thread).
conduit=Conduit()
|
145
|
+
def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
    """Record one function invocation on the shared conduit.

    Bridge matching the legacy sync.py naming; builds a record and hands it
    to conduit.add_function_call.  Argument values are sanitized: primitives
    pass through, anything else is replaced by its type name.
    """
    record = {
        'function': function_name,
        'source': source,
        'execution_time': execution_time,
        'timestamp': datetime.now().isoformat(),
        'success': success,
    }
    if error:
        record['error'] = error
    if args:
        if isinstance(args, dict):
            safe_args = {}
            for key, value in args.items():
                if isinstance(value, (str, int, float, bool)):
                    safe_args[key] = value
                else:
                    safe_args[key] = str(type(value))
        else:
            safe_args = {'value': str(args)}
        record['args'] = safe_args
    conduit.add_function_call(record)
|
157
|
+
def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
    """Record one rate-limit check on the shared conduit (bridge to add_rate_limit)."""
    usage_pct = current_usage / limit_value * 100 if limit_value > 0 else 0
    record = {
        'source': source,
        'limit_type': limit_type,
        'limit_value': limit_value,
        'current_usage': current_usage,
        'is_exceeded': is_exceeded,
        'timestamp': datetime.now().isoformat(),
        'usage_percentage': usage_pct,
    }
    conduit.add_rate_limit(record)
|
158
|
+
def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
    """Record one API request on the shared conduit (bridge to add_api_request)."""
    record = {
        'endpoint': endpoint,
        'source': source,
        'method': method,
        'status_code': status_code,
        'execution_time': execution_time,
        'timestamp': datetime.now().isoformat(),
        'request_size': request_size,
        'response_size': response_size,
    }
    conduit.add_api_request(record)
|
159
|
+
def configure(webhook_url):
    # Module-level bridge: set the webhook URL on the shared conduit singleton.
    return conduit.configure(webhook_url)
|
160
|
+
def sync_now():
    # Force an immediate flush of buffered telemetry (trigger reason 'manual').
    return conduit.dispatch(_T)
|
161
|
+
def retry_failed():
    # Re-attempt delivery of payloads that previously failed to send.
    return conduit.retry_failed()
|
vnai/scope/__init__.py
CHANGED
@@ -1,7 +1,4 @@
|
|
1
|
-
# vnai/scope/__init__.py
|
2
|
-
# Environment detection and state tracking
|
3
|
-
|
4
1
|
from vnai.scope.profile import inspector
|
5
|
-
from vnai.scope.state import tracker,
|
2
|
+
from vnai.scope.state import tracker,record
|
6
3
|
from vnai.scope.promo import manager as content_manager
|
7
|
-
from vnai.scope.promo import present as present_content
|
4
|
+
from vnai.scope.promo import present as present_content
|