vnai 0.1.4__py3-none-any.whl → 2.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +270 -319
- vnai/beam/__init__.py +6 -0
- vnai/beam/metrics.py +184 -0
- vnai/beam/pulse.py +109 -0
- vnai/beam/quota.py +478 -0
- vnai/flow/__init__.py +5 -0
- vnai/flow/queue.py +134 -0
- vnai/flow/relay.py +442 -0
- vnai/scope/__init__.py +7 -0
- vnai/scope/profile.py +767 -0
- vnai/scope/promo.py +236 -0
- vnai/scope/state.py +223 -0
- vnai-2.0.1.dist-info/METADATA +32 -0
- vnai-2.0.1.dist-info/RECORD +16 -0
- {vnai-0.1.4.dist-info → vnai-2.0.1.dist-info}/WHEEL +1 -1
- vnai-0.1.4.dist-info/METADATA +0 -19
- vnai-0.1.4.dist-info/RECORD +0 -5
- {vnai-0.1.4.dist-info → vnai-2.0.1.dist-info}/top_level.txt +0 -0
vnai/flow/relay.py
ADDED
@@ -0,0 +1,442 @@
|
|
1
|
+
# vnai/flow/relay.py
|
2
|
+
# Data transmission system (formerly sync)
|
3
|
+
|
4
|
+
import time
|
5
|
+
import threading
|
6
|
+
import json
|
7
|
+
import random
|
8
|
+
import requests
|
9
|
+
from datetime import datetime
|
10
|
+
from pathlib import Path
|
11
|
+
from typing import Dict, List, Any, Optional
|
12
|
+
|
13
|
+
class Conduit:
    """Buffers telemetry records and transmits them to a configured webhook.

    Singleton. Three record buffers (function calls, API requests, rate-limit
    events) are flushed when a size/event/time trigger fires, when the periodic
    background thread wakes up, or on an explicit ``dispatch()`` call.
    Configuration (webhook URL, buffer size, sync interval, counters) is
    persisted under ``~/.vnstock/data/relay_config.json``.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls, webhook_url=None, buffer_size=50, sync_interval=300):
        # Serialize creation so exactly one instance is ever initialized.
        # Constructor arguments are ignored on every call after the first.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Conduit, cls).__new__(cls)
                cls._instance._initialize(webhook_url, buffer_size, sync_interval)
            return cls._instance

    def _initialize(self, webhook_url, buffer_size, sync_interval):
        """One-time setup of buffers, persistence paths, machine id, and the sync thread."""
        self.webhook_url = webhook_url
        self.buffer_size = buffer_size      # trigger threshold: total records across all buffers
        self.sync_interval = sync_interval  # seconds between periodic flushes

        # Separate buffers for different data types
        self.buffer = {
            "function_calls": [],
            "api_requests": [],
            "rate_limits": []
        }

        self.lock = threading.Lock()  # guards self.buffer and self.failed_queue
        self.last_sync_time = time.time()
        self.sync_count = 0
        self.failed_queue = []  # payloads whose POST failed (capped at 10 in dispatch)

        # Home directory setup: config persisted under ~/.vnstock/data
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / ".vnstock"
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.config_path = self.data_dir / "relay_config.json"

        # Get machine identifier from system profile; fall back to a local hash.
        try:
            from vnai.scope.profile import inspector
            self.machine_id = inspector.fingerprint()
        except Exception:
            # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
            # are not swallowed during interpreter shutdown.
            self.machine_id = self._generate_fallback_id()

        # Load config if exists, then start the periodic sync thread.
        self._load_config()
        self._start_periodic_sync()

    def _generate_fallback_id(self) -> str:
        """Generate a fallback machine identifier if profile is unavailable."""
        try:
            import platform
            import hashlib

            # Hash machine-specific information. md5 is acceptable here
            # because the digest is only an identifier, not a security boundary.
            system_info = platform.node() + platform.platform() + platform.processor()
            return hashlib.md5(system_info.encode()).hexdigest()
        except Exception:
            import uuid
            return str(uuid.uuid4())

    def _load_config(self):
        """Load persisted configuration, overriding defaults where present."""
        if self.config_path.exists():
            try:
                with open(self.config_path, 'r') as f:
                    config = json.load(f)

                # An explicit constructor webhook_url wins over the saved one.
                if not self.webhook_url and 'webhook_url' in config:
                    self.webhook_url = config['webhook_url']
                if 'buffer_size' in config:
                    self.buffer_size = config['buffer_size']
                if 'sync_interval' in config:
                    self.sync_interval = config['sync_interval']
                if 'last_sync_time' in config:
                    self.last_sync_time = config['last_sync_time']
                if 'sync_count' in config:
                    self.sync_count = config['sync_count']
            except Exception:
                # A corrupt/unreadable config file is non-fatal; keep defaults.
                pass

    def _save_config(self):
        """Persist current configuration to disk; failures are non-fatal."""
        config = {
            'webhook_url': self.webhook_url,
            'buffer_size': self.buffer_size,
            'sync_interval': self.sync_interval,
            'last_sync_time': self.last_sync_time,
            'sync_count': self.sync_count
        }

        try:
            with open(self.config_path, 'w') as f:
                json.dump(config, f)
        except Exception:
            # Telemetry persistence is best-effort; never raise to the caller.
            pass

    def _start_periodic_sync(self):
        """Start a daemon thread that flushes buffers every sync_interval seconds."""
        def periodic_sync():
            while True:
                time.sleep(self.sync_interval)
                self.dispatch("periodic")

        sync_thread = threading.Thread(target=periodic_sync, daemon=True)
        sync_thread.start()

    def add_function_call(self, record):
        """Add a function-call record to the buffer (coerced to a dict)."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["function_calls"].append(record)
            self._check_triggers("function_calls")

    def add_api_request(self, record):
        """Add an API-request record to the buffer (coerced to a dict)."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["api_requests"].append(record)
            self._check_triggers("api_requests")

    def add_rate_limit(self, record):
        """Add a rate-limit record to the buffer (coerced to a dict)."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["rate_limits"].append(record)
            self._check_triggers("rate_limits")

    def _check_triggers(self, record_type: str):
        """Check if any sync triggers are met.

        Called while self.lock is held; dispatch runs on a separate thread so
        it can re-acquire the lock without deadlocking.
        """
        current_time = time.time()
        should_trigger = False
        trigger_reason = None

        # Get total buffer size
        total_records = sum(len(buffer) for buffer in self.buffer.values())

        # SIZE TRIGGER: Buffer size threshold reached
        if total_records >= self.buffer_size:
            should_trigger = True
            trigger_reason = "buffer_full"

        # EVENT TRIGGER: Critical events (errors, rate limit warnings)
        elif record_type == "rate_limits" and self.buffer["rate_limits"] and \
            any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = "rate_limit_exceeded"
        elif record_type == "function_calls" and self.buffer["function_calls"] and \
            any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = "function_error"

        # TIME-WEIGHTED RANDOM TRIGGER: More likely as time since last sync increases
        else:
            time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
            if random.random() < 0.05 * time_factor:  # 0-5% chance based on time
                should_trigger = True
                trigger_reason = "random_time_weighted"

        if should_trigger:
            threading.Thread(
                target=self.dispatch,
                args=(trigger_reason,),
                daemon=True
            ).start()

    def queue(self, package, priority=None):
        """Queue a data package, routing it to the right buffer by ``type``.

        Returns False for an empty package, True once the package is buffered.
        ``priority == "high"`` forces an immediate dispatch.
        """
        if not package:
            return False

        # Handle non-dictionary packages
        if not isinstance(package, dict):
            self.add_function_call({"message": str(package)})
            return True

        # Add timestamp if not present
        if "timestamp" not in package:
            package["timestamp"] = datetime.now().isoformat()

        # Route based on package type
        if "type" in package:
            package_type = package["type"]
            data = package.get("data", {})

            # Remove system info if present to avoid duplication
            if isinstance(data, dict) and "system" in data:
                # Get machine_id for reference but don't duplicate the whole system info
                machine_id = data["system"].get("machine_id")
                data.pop("system")
                if machine_id:
                    data["machine_id"] = machine_id

            if package_type == "function":
                self.add_function_call(data)
            elif package_type == "api_request":
                self.add_api_request(data)
            elif package_type == "rate_limit":
                self.add_rate_limit(data)
            elif package_type == "system_info":
                # For system info, we'll add it as a special function call
                # but remove duplicated data
                self.add_function_call({
                    "type": "system_info",
                    "commercial": data.get("commercial"),
                    "packages": data.get("packages"),
                    "timestamp": package.get("timestamp")
                })
            elif package_type == "metrics":
                # Handle metrics package with multiple categories
                metrics_data = data
                for metric_type, metrics_list in metrics_data.items():
                    if isinstance(metrics_list, list):
                        if metric_type == "function":
                            for item in metrics_list:
                                self.add_function_call(item)
                        elif metric_type == "rate_limit":
                            for item in metrics_list:
                                self.add_rate_limit(item)
                        elif metric_type == "request":
                            for item in metrics_list:
                                self.add_api_request(item)
            else:
                # Default to function calls
                self.add_function_call(data)
        else:
            # No type specified, default to function call
            self.add_function_call(package)

        # Handle high priority items
        if priority == "high":
            self.dispatch("high_priority")

        return True

    def dispatch(self, reason="manual"):
        """Send queued data to the webhook.

        Returns False when no webhook is configured or all buffers are empty;
        otherwise returns whether the POST succeeded. Failed payloads are kept
        (up to 10) for later retry_failed().
        """
        if not self.webhook_url:
            return False

        with self.lock:
            # Check if all buffers are empty
            if all(len(records) == 0 for records in self.buffer.values()):
                return False

            # Create a copy of the buffer for sending
            data_to_send = {
                "function_calls": self.buffer["function_calls"].copy(),
                "api_requests": self.buffer["api_requests"].copy(),
                "rate_limits": self.buffer["rate_limits"].copy()
            }

            # Clear buffer
            self.buffer = {
                "function_calls": [],
                "api_requests": [],
                "rate_limits": []
            }

            # Update sync time and count
            self.last_sync_time = time.time()
            self.sync_count += 1
            self._save_config()

        # Get environment information ONCE
        try:
            from vnai.scope.profile import inspector
            environment_info = inspector.examine()
            machine_id = environment_info.get("machine_id", self.machine_id)
        except Exception:
            # Fallback if environment info isn't available
            environment_info = {"machine_id": self.machine_id}
            machine_id = self.machine_id

        # Create payload with environment info only in metadata
        payload = {
            "analytics_data": data_to_send,
            "metadata": {
                "timestamp": datetime.now().isoformat(),
                "machine_id": machine_id,
                "sync_count": self.sync_count,
                "trigger_reason": reason,
                "environment": environment_info,
                "data_counts": {
                    "function_calls": len(data_to_send["function_calls"]),
                    "api_requests": len(data_to_send["api_requests"]),
                    "rate_limits": len(data_to_send["rate_limits"])
                }
            }
        }

        # Send data
        success = self._send_data(payload)

        if not success:
            with self.lock:
                self.failed_queue.append(payload)
                # Keep only the 10 most recent failed payloads.
                if len(self.failed_queue) > 10:
                    self.failed_queue = self.failed_queue[-10:]

        return success

    def _send_data(self, payload):
        """POST the payload to the webhook; True only on HTTP 200."""
        if not self.webhook_url:
            return False

        try:
            response = requests.post(
                self.webhook_url,
                json=payload,
                timeout=5  # 5 second timeout
            )

            return response.status_code == 200
        except Exception:
            # Network errors are expected; caller re-queues the payload.
            return False

    def retry_failed(self):
        """Retry sending failed payloads; returns the number that succeeded."""
        if not self.failed_queue:
            return 0

        with self.lock:
            to_retry = self.failed_queue.copy()
            self.failed_queue = []

        success_count = 0
        for payload in to_retry:
            if self._send_data(payload):
                success_count += 1
            else:
                with self.lock:
                    self.failed_queue.append(payload)

        return success_count

    def configure(self, webhook_url):
        """Configure webhook URL and persist it."""
        with self.lock:
            self.webhook_url = webhook_url
            self._save_config()
            return True
370
|
+
|
371
|
+
# Create singleton instance
# Module-level shared Conduit used by the bridge functions in this module.
# No webhook is configured at import time; configure() sets it later.
conduit = Conduit()
|
373
|
+
|
374
|
+
# Exposed functions that match sync.py naming pattern
|
375
|
+
def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
    """Record one function invocation on the shared conduit.

    Builds a record with name, source, timing, timestamp and success flag,
    optionally attaching the error and a sanitized copy of the arguments
    (primitive values kept, everything else replaced by its type string).
    """
    record = {
        "function": function_name,
        "source": source,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "success": success,
    }

    if error:
        record["error"] = error

    if args:
        # Only primitives survive sanitization; other values become type names.
        if isinstance(args, dict):
            record["args"] = {
                key: (value if isinstance(value, (str, int, float, bool)) else str(type(value)))
                for key, value in args.items()
            }
        else:
            record["args"] = {"value": str(args)}

    conduit.add_function_call(record)
|
402
|
+
|
403
|
+
def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
    """Record one rate-limit check on the shared conduit.

    Computes the usage percentage (0 when the limit is non-positive) and
    forwards the full record to conduit.add_rate_limit.
    """
    usage_pct = (current_usage / limit_value) * 100 if limit_value > 0 else 0
    conduit.add_rate_limit({
        "source": source,
        "limit_type": limit_type,
        "limit_value": limit_value,
        "current_usage": current_usage,
        "is_exceeded": is_exceeded,
        "timestamp": datetime.now().isoformat(),
        "usage_percentage": usage_pct,
    })
|
416
|
+
|
417
|
+
def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
    """Record one API request on the shared conduit.

    Captures endpoint, source, HTTP method, status, timing and payload sizes,
    stamps the record with the current time, and hands it to
    conduit.add_api_request.
    """
    conduit.add_api_request({
        "endpoint": endpoint,
        "source": source,
        "method": method,
        "status_code": status_code,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "request_size": request_size,
        "response_size": response_size,
    })
|
431
|
+
|
432
|
+
def configure(webhook_url):
    """Set the webhook URL on the shared conduit and persist it."""
    result = conduit.configure(webhook_url)
    return result
|
435
|
+
|
436
|
+
def sync_now():
    """Force an immediate flush of the shared conduit's buffers."""
    outcome = conduit.dispatch("manual")
    return outcome
|
439
|
+
|
440
|
+
def retry_failed():
    """Resend previously failed payloads; returns how many succeeded."""
    retried = conduit.retry_failed()
    return retried
|
vnai/scope/__init__.py
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
# vnai/scope/__init__.py
|
2
|
+
# Environment detection and state tracking
|
3
|
+
|
4
|
+
from vnai.scope.profile import inspector
|
5
|
+
from vnai.scope.state import tracker, record
|
6
|
+
from vnai.scope.promo import manager as content_manager
|
7
|
+
from vnai.scope.promo import present as present_content
|