vnai 2.0.7__tar.gz → 2.0.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {vnai-2.0.7 → vnai-2.0.9}/PKG-INFO +1 -1
- {vnai-2.0.7 → vnai-2.0.9}/pyproject.toml +1 -1
- vnai-2.0.9/vnai/__init__.py +79 -0
- vnai-2.0.9/vnai/beam/__init__.py +3 -0
- vnai-2.0.9/vnai/beam/metrics.py +64 -0
- vnai-2.0.9/vnai/beam/pulse.py +31 -0
- vnai-2.0.9/vnai/beam/quota.py +105 -0
- vnai-2.0.9/vnai/flow/__init__.py +2 -0
- vnai-2.0.9/vnai/flow/queue.py +55 -0
- vnai-2.0.9/vnai/flow/relay.py +161 -0
- vnai-2.0.9/vnai/scope/__init__.py +4 -0
- vnai-2.0.9/vnai/scope/profile.py +223 -0
- vnai-2.0.9/vnai/scope/promo.py +117 -0
- vnai-2.0.9/vnai/scope/state.py +74 -0
- {vnai-2.0.7 → vnai-2.0.9}/vnai.egg-info/PKG-INFO +1 -1
- vnai-2.0.7/vnai/__init__.py +0 -275
- vnai-2.0.7/vnai/beam/__init__.py +0 -6
- vnai-2.0.7/vnai/beam/metrics.py +0 -189
- vnai-2.0.7/vnai/beam/pulse.py +0 -109
- vnai-2.0.7/vnai/beam/quota.py +0 -487
- vnai-2.0.7/vnai/flow/__init__.py +0 -5
- vnai-2.0.7/vnai/flow/queue.py +0 -134
- vnai-2.0.7/vnai/flow/relay.py +0 -468
- vnai-2.0.7/vnai/scope/__init__.py +0 -7
- vnai-2.0.7/vnai/scope/profile.py +0 -766
- vnai-2.0.7/vnai/scope/promo.py +0 -386
- vnai-2.0.7/vnai/scope/state.py +0 -223
- {vnai-2.0.7 → vnai-2.0.9}/README.md +0 -0
- {vnai-2.0.7 → vnai-2.0.9}/setup.cfg +0 -0
- {vnai-2.0.7 → vnai-2.0.9}/vnai.egg-info/SOURCES.txt +0 -0
- {vnai-2.0.7 → vnai-2.0.9}/vnai.egg-info/dependency_links.txt +0 -0
- {vnai-2.0.7 → vnai-2.0.9}/vnai.egg-info/requires.txt +0 -0
- {vnai-2.0.7 → vnai-2.0.9}/vnai.egg-info/top_level.txt +0 -0
@@ -0,0 +1,79 @@
|
|
1
|
+
# NOTE(review): this module ships in minified form; the single-letter names
# below are minifier-generated aliases for common literals and are kept as-is.
# Code reconstructed from a diff view that stripped indentation — layout is
# conventional, tokens are unchanged.
_L='default'
_K='standard'
_J='accepted_agreement'
_I='environment.json'
_H='terms_agreement.txt'
_G='timestamp'
_F=False
_E='id'
_D='.vnstock'
_C='machine_id'
_B=None
_A=True
import os,pathlib,json,time,threading,functools
from datetime import datetime
from vnai.beam.quota import guardian,optimize
from vnai.beam.metrics import collector,capture
from vnai.beam.pulse import monitor
from vnai.flow.relay import conduit,configure
from vnai.flow.queue import buffer
from vnai.scope.profile import inspector
from vnai.scope.state import tracker,record
import vnai.scope.promo
from vnai.scope.promo import present
# Environment-variable gate used to signal acceptance of the terms.
TC_VAR='ACCEPT_TC'
TC_VAL='tôi đồng ý'
# Signed agreement is persisted at ~/.vnstock/id/terms_agreement.txt
TC_PATH=pathlib.Path.home()/_D/_E/_H
TERMS_AND_CONDITIONS='\nKhi tiếp tục sử dụng Vnstock, bạn xác nhận rằng bạn đã đọc, hiểu và đồng ý với Chính sách quyền riêng tư và Điều khoản, điều kiện về giấy phép sử dụng Vnstock.\n\nChi tiết:\n- Giấy phép sử dụng phần mềm: https://vnstocks.com/docs/tai-lieu/giay-phep-su-dung\n- Chính sách quyền riêng tư: https://vnstocks.com/docs/tai-lieu/chinh-sach-quyen-rieng-tu\n'
class Core:
    """Package bootstrap singleton: ensures terms acceptance, sets up the
    vnstock environment, records an initialization event, and queues a
    system-information snapshot through the relay."""
    def __init__(self):self.initialized=_F;self.webhook_url=_B;self.init_time=datetime.now().isoformat();self.home_dir=pathlib.Path.home();self.project_dir=self.home_dir/_D;self.id_dir=self.project_dir/_E;self.terms_file_path=TC_PATH;self.system_info=_B;self.project_dir.mkdir(exist_ok=_A);self.id_dir.mkdir(exist_ok=_A);self.initialize()
    def initialize(self,webhook_url=_B):
        # Idempotent: returns True immediately once initialized.
        if self.initialized:return _A
        if not self._check_terms():self._accept_terms()
        from vnai.scope.profile import inspector;inspector.setup_vnstock_environment();present()
        if webhook_url:self.webhook_url=webhook_url;configure(webhook_url)
        # Record init and queue a high-priority system snapshot (commercial
        # usage detection plus installed-package scan).
        record('initialization',{_G:datetime.now().isoformat()});self.system_info=inspector.examine();conduit.queue({'type':'system_info','data':{'commercial':inspector.detect_commercial_usage(),'packages':inspector.scan_packages()}},priority='high');self.initialized=_A;return _A
    def _check_terms(self):return os.path.exists(self.terms_file_path)
    def _accept_terms(self):
        # NOTE(review): both branches assign the same acceptance value and the
        # else-branch sets ACCEPT_TC itself, so the terms end up marked as
        # accepted whether or not the user pre-set the variable; `response`
        # is never read afterwards. Confirm this is intended.
        system_info=inspector.examine()
        if TC_VAR in os.environ and os.environ[TC_VAR]==TC_VAL:response=TC_VAL
        else:response=TC_VAL;os.environ[TC_VAR]=TC_VAL
        now=datetime.now();signed_agreement=f"""Người dùng có mã nhận dạng {system_info[_C]} đã chấp nhận điều khoản & điều kiện sử dụng Vnstock lúc {now}
---

THÔNG TIN THIẾT BỊ: {json.dumps(system_info,indent=2)}

Đính kèm bản sao nội dung bạn đã đọc, hiểu rõ và đồng ý dưới đây:
{TERMS_AND_CONDITIONS}"""
        with open(self.terms_file_path,'w',encoding='utf-8')as f:f.write(signed_agreement)
        # Machine-readable acceptance marker stored next to the text copy.
        env_file=self.id_dir/_I;env_data={_J:_A,_G:now.isoformat(),_C:system_info[_C]}
        with open(env_file,'w')as f:json.dump(env_data,f)
        return _A
    # Aggregate status: init flag, health monitor report, tracker metrics.
    def status(self):return{'initialized':self.initialized,'health':monitor.report(),'metrics':tracker.get_metrics()}
    # Delegate privacy-level configuration to the state tracker.
    def configure_privacy(self,level=_K):from vnai.scope.state import tracker;return tracker.setup_privacy(level)
# Module-level singleton, constructed (with all side effects) at import time.
core=Core()
# Thin module-level wrappers around the Core singleton and beam decorators.
def tc_init(webhook_url=_B):return core.initialize(webhook_url)
def setup(webhook_url=_B):return core.initialize(webhook_url)
def optimize_execution(resource_type=_L):return optimize(resource_type)
# Variant with a long ad cooldown and an effectively-disabled content trigger.
def agg_execution(resource_type=_L):return optimize(resource_type,ad_cooldown=1500,content_trigger_threshold=100000)
def measure_performance(module_type='function'):return capture(module_type)
def accept_license_terms(terms_text=_B):
    # Write (or overwrite) the terms-acceptance file with the given text.
    if terms_text is _B:terms_text=TERMS_AND_CONDITIONS
    system_info=inspector.examine();terms_file=pathlib.Path.home()/_D/_E/_H;os.makedirs(os.path.dirname(terms_file),exist_ok=_A)
    with open(terms_file,'w',encoding='utf-8')as f:f.write(f"Terms accepted at {datetime.now().isoformat()}\n");f.write(f"System: {json.dumps(system_info)}\n\n");f.write(terms_text)
    return _A
def accept_vnstock_terms():
    # Write the machine-readable acceptance marker (~/.vnstock/id/environment.json).
    from vnai.scope.profile import inspector;system_info=inspector.examine();home_dir=pathlib.Path.home();project_dir=home_dir/_D;project_dir.mkdir(exist_ok=_A);id_dir=project_dir/_E;id_dir.mkdir(exist_ok=_A);env_file=id_dir/_I;env_data={_J:_A,_G:datetime.now().isoformat(),_C:system_info[_C]}
    try:
        with open(env_file,'w')as f:json.dump(env_data,f)
        print('Vnstock terms accepted successfully.');return _A
    except Exception as e:print(f"Error accepting terms: {e}");return _F
def setup_for_colab():from vnai.scope.profile import inspector;inspector.detect_colab_with_delayed_auth(immediate=_A);inspector.setup_vnstock_environment();return'Environment set up for Google Colab'
def display_content():return present()
# NOTE(review): duplicates Core.configure_privacy at module level.
def configure_privacy(level=_K):from vnai.scope.state import tracker;return tracker.setup_privacy(level)
def check_commercial_usage():from vnai.scope.profile import inspector;return inspector.detect_commercial_usage()
def authenticate_for_persistence():from vnai.scope.profile import inspector;return inspector.get_or_create_user_id()
def configure_webhook(webhook_id='80b8832b694a75c8ddc811ac7882a3de'):
    # Points the relay at a Larksuite bot webhook; the default id is
    # hard-coded, and this is invoked unconditionally at import time below.
    if not webhook_id:return _F
    from vnai.flow.relay import configure;webhook_url=f"https://botbuilder.larksuite.com/api/trigger-webhook/{webhook_id}";return configure(webhook_url)
configure_webhook()
|
@@ -0,0 +1,64 @@
|
|
1
|
+
# Legacy minifier aliases; kept so any external `from vnai.beam.metrics
# import _A`-style usage continues to work.
_K='success'
_J='buffer_size'
_I='request'
_H='rate_limit'
_G='execution_time'
_F='timestamp'
_E=False
_D='error'
_C=True
_B=None
_A='function'
import sys
import time
import threading
import hashlib  # BUGFIX: used by Collector.record() but was never imported
import json     # BUGFIX: used by Collector.record() but was never imported
import functools
from collections import deque
from datetime import datetime


class Collector:
    """Singleton that buffers usage metrics by category and flushes them to
    the relay (vnai.flow.relay) when thresholds are reached."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Thread-safe lazy singleton construction.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Collector, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        # One buffer per metric category; unknown categories fall back to 'function'.
        self.metrics = {'function': [], 'rate_limit': [], 'request': [], 'error': []}
        self.thresholds = {'buffer_size': 50, 'error_threshold': .1, 'performance_threshold': 5.}
        self.function_count = 0
        self.colab_auth_triggered = False
        self.max_metric_length = 200  # per-category cap, newest entries kept
        self._last_record_time = {}
        # BUGFIX: record() reads the two attributes below, but they were never
        # initialized, so every record() call raised AttributeError.
        self.min_interval_per_type = 0  # seconds; 0 disables time throttling
        self._recent_hashes = deque(maxlen=100)  # rolling duplicate-payload window

    def record(self, metric_type, data, priority=None):
        """Buffer one metric record.

        Non-dict payloads are wrapped as ``{'value': str(data)}`` and a
        timestamp is attached if missing. Everything except 'system_info'
        records gets a machine fingerprint. Duplicate payloads (md5 of the
        sorted JSON) and records arriving faster than `min_interval_per_type`
        are dropped unless priority == 'high'. Flushes when the combined
        buffer reaches `buffer_size`, on priority 'high', or for 'error'.
        """
        if not isinstance(data, dict):
            data = {'value': str(data)}
        if 'timestamp' not in data:
            data['timestamp'] = datetime.now().isoformat()
        if metric_type != 'system_info':
            data.pop('system', None)
            from vnai.scope.profile import inspector
            data['machine_id'] = inspector.fingerprint()
        now = time.time()
        last_time = self._last_record_time.get(metric_type, 0)
        if now - last_time < self.min_interval_per_type and priority != 'high':
            return
        self._last_record_time[metric_type] = now
        data_hash = hashlib.md5(json.dumps(data, sort_keys=True).encode()).hexdigest()
        if data_hash in self._recent_hashes and priority != 'high':
            return
        self._recent_hashes.append(data_hash)
        if metric_type in self.metrics:
            self.metrics[metric_type].append(data)
            # Keep only the newest max_metric_length entries per category.
            if len(self.metrics[metric_type]) > self.max_metric_length:
                self.metrics[metric_type] = self.metrics[metric_type][-self.max_metric_length:]
        else:
            self.metrics['function'].append(data)
        if metric_type == 'function':
            self.function_count += 1
            # Inside Google Colab, kick off background auth once after enough calls.
            if self.function_count > 10 and not self.colab_auth_triggered and 'google.colab' in sys.modules:
                self.colab_auth_triggered = True
                threading.Thread(target=self._trigger_colab_auth, daemon=True).start()
        if sum(len(metric_list) for metric_list in self.metrics.values()) >= self.thresholds['buffer_size']:
            self._send_metrics()
        if priority == 'high' or metric_type == 'error':
            self._send_metrics()

    def _trigger_colab_auth(self):
        # Best effort; authentication problems must never surface to callers.
        try:
            from vnai.scope.profile import inspector
            inspector.get_or_create_user_id()
        except:
            pass

    def _send_metrics(self):
        """Drain every buffer through the relay trackers; individual records
        that fail to send are skipped (best-effort delivery), and each
        category buffer is cleared afterwards."""
        from vnai.flow.relay import track_function_call, track_rate_limit, track_api_request
        for metric_type, data_list in self.metrics.items():
            if not data_list:
                continue
            for data in data_list:
                try:
                    if metric_type == 'function':
                        track_function_call(function_name=data.get('function', 'unknown'), source=data.get('source', 'vnai'), execution_time=data.get('execution_time', 0), success=data.get('success', True), error=data.get('error'), args=data.get('args'))
                    elif metric_type == 'rate_limit':
                        track_rate_limit(source=data.get('source', 'vnai'), limit_type=data.get('limit_type', 'unknown'), limit_value=data.get('limit_value', 0), current_usage=data.get('current_usage', 0), is_exceeded=data.get('is_exceeded', False))
                    elif metric_type == 'request':
                        track_api_request(endpoint=data.get('endpoint', 'unknown'), source=data.get('source', 'vnai'), method=data.get('method', 'GET'), status_code=data.get('status_code', 200), execution_time=data.get('execution_time', 0), request_size=data.get('request_size', 0), response_size=data.get('response_size', 0))
                except Exception:
                    continue
            self.metrics[metric_type] = []

    def get_metrics_summary(self):
        """Return ``{category: buffered record count}``."""
        return {metric_type: len(data_list) for metric_type, data_list in self.metrics.items()}


# Shared module-level singleton used by the capture() decorator.
collector = Collector()


def capture(module_type='function'):
    """Decorator factory: time the wrapped call and record its outcome under
    `module_type` via the shared collector. Exceptions are re-raised after
    the metric is recorded."""
    def decorator(func):
        @functools.wraps(func)  # added: preserve wrapped-function metadata
        def wrapper(*args, **kwargs):
            start_time = time.time()
            success = False
            error = None
            try:
                result = func(*args, **kwargs)
                success = True
                return result
            except Exception as e:
                error = str(e)
                raise
            finally:
                execution_time = time.time() - start_time
                collector.record(module_type, {'function': func.__name__, 'execution_time': execution_time, 'success': success, 'error': error, 'timestamp': datetime.now().isoformat(), 'args': str(args)[:100] if args else None})
        return wrapper
    return decorator
|
@@ -0,0 +1,31 @@
|
|
1
|
+
"""Lightweight package health monitor, refreshed by a background daemon."""
# Aliases retained from the original minified source (module-level API).
_B = 'status'
_A = 'healthy'
import threading, time
from datetime import datetime


class Monitor:
    """Singleton that classifies package health as healthy/warning/critical
    from buffered error metrics and rate-limit pressure."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Create-once singleton; construction is serialized by the class lock.
        with cls._lock:
            if cls._instance is None:
                created = super(Monitor, cls).__new__(cls)
                created._initialize()
                cls._instance = created
        return cls._instance

    def _initialize(self):
        self.health_status = 'healthy'
        self.last_check = time.time()
        self.check_interval = 300  # seconds between background checks
        self.error_count = 0
        self.warning_count = 0
        self.status_history = []
        self._start_background_check()

    def _start_background_check(self):
        def _poll_forever():
            # Daemon loop: swallow every failure and keep polling.
            while True:
                try:
                    self.check_health()
                except:
                    pass
                time.sleep(self.check_interval)

        threading.Thread(target=_poll_forever, daemon=True).start()

    def check_health(self):
        """Re-evaluate health from collector errors and guardian usage,
        append a history entry (last 10 kept), and return the new status."""
        from vnai.beam.metrics import collector
        from vnai.beam.quota import guardian
        self.last_check = time.time()
        metrics_summary = collector.get_metrics_summary()
        has_errors = metrics_summary.get('error', 0) > 0
        resource_usage = guardian.usage()
        high_usage = resource_usage > 80
        if has_errors and high_usage:
            self.health_status = 'critical'
            self.error_count += 1
        elif has_errors or high_usage:
            self.health_status = 'warning'
            self.warning_count += 1
        else:
            self.health_status = 'healthy'
        self.status_history.append({
            'timestamp': datetime.now().isoformat(),
            'status': self.health_status,
            'metrics': metrics_summary,
            'resource_usage': resource_usage,
        })
        if len(self.status_history) > 10:
            self.status_history = self.status_history[-10:]
        return self.health_status

    def report(self):
        """Return the current status snapshot, refreshing first if the last
        check is older than `check_interval`."""
        if time.time() - self.last_check > self.check_interval:
            self.check_health()
        return {
            'status': self.health_status,
            'last_check': datetime.fromtimestamp(self.last_check).isoformat(),
            'error_count': self.error_count,
            'warning_count': self.warning_count,
            'history': self.status_history[-3:],
        }

    def reset(self):
        """Restore the monitor to its freshly-initialized state."""
        self.health_status = 'healthy'
        self.error_count = 0
        self.warning_count = 0
        self.status_history = []
        self.last_check = time.time()


# Shared module-level singleton.
monitor = Monitor()
|
@@ -0,0 +1,105 @@
|
|
1
|
+
# Minifier-generated aliases for common literals; kept exactly as shipped.
# Code reconstructed from a diff view that stripped indentation — layout is
# conventional, tokens are unchanged.
_G='resource_type'
_F=False
_E=True
_D=None
_C='default'
_B='hour'
_A='min'
import time,functools,threading
from collections import defaultdict
from datetime import datetime
class RateLimitExceeded(Exception):
    """Raised when a resource's per-minute or per-hour call quota is full.

    Carries the resource name, which window tripped ('min'/'hour'), observed
    usage, the configured limit, and a suggested retry delay in seconds.
    The message text is user-facing Vietnamese.
    """
    def __init__(self,resource_type,limit_type=_A,current_usage=_D,limit_value=_D,retry_after=_D):
        self.resource_type=resource_type;self.limit_type=limit_type;self.current_usage=current_usage;self.limit_value=limit_value;self.retry_after=retry_after;message=f"Bạn đã gửi quá nhiều request tới {resource_type}. "
        if retry_after:message+=f"Vui lòng thử lại sau {round(retry_after)} giây."
        else:message+='Vui lòng thêm thời gian chờ giữa các lần gửi request.'
        super().__init__(message)
class Guardian:
    """Thread-safe singleton sliding-window rate limiter.

    Usage is tracked as raw call timestamps per resource type and pruned on
    every check; limits are per-minute ('min') and per-hour ('hour').
    """
    _instance=_D;_lock=threading.Lock()
    def __new__(cls):
        # Lazy singleton; construction guarded by a class-level lock.
        with cls._lock:
            if cls._instance is _D:cls._instance=super(Guardian,cls).__new__(cls);cls._instance._initialize()
        return cls._instance
    def _initialize(self):
        # Known sources get explicit limits; the '.ext' variants are 10x more
        # permissive; unknown resources fall back to the 'default' entry.
        self.resource_limits=defaultdict(lambda:defaultdict(int));self.usage_counters=defaultdict(lambda:defaultdict(list));self.resource_limits[_C]={_A:60,_B:3000};self.resource_limits['TCBS']={_A:60,_B:3000};self.resource_limits['VCI']={_A:60,_B:3000};self.resource_limits['MBK']={_A:600,_B:36000};self.resource_limits['MAS.ext']={_A:600,_B:36000};self.resource_limits['VCI.ext']={_A:600,_B:36000};self.resource_limits['FMK.ext']={_A:600,_B:36000};self.resource_limits['VND.ext']={_A:600,_B:36000};self.resource_limits['CAF.ext']={_A:600,_B:36000};self.resource_limits['SPL.ext']={_A:600,_B:36000};self.resource_limits['VDS.ext']={_A:600,_B:36000};self.resource_limits['FAD.ext']={_A:600,_B:36000}
    def verify(self,operation_id,resource_type=_C):
        """Count one call against `resource_type`, raising RateLimitExceeded
        when either window is already full.

        NOTE(review): `operation_id` is accepted but never used in this body.
        Every call (exceeded or not) also records a 'rate_limit' metric via
        vnai.beam.metrics.
        """
        E='is_exceeded';D='current_usage';C='limit_value';B='limit_type';A='rate_limit';current_time=time.time();limits=self.resource_limits.get(resource_type,self.resource_limits[_C]);minute_cutoff=current_time-60;self.usage_counters[resource_type][_A]=[t for t in self.usage_counters[resource_type][_A]if t>minute_cutoff];minute_usage=len(self.usage_counters[resource_type][_A]);minute_exceeded=minute_usage>=limits[_A]
        if minute_exceeded:from vnai.beam.metrics import collector;collector.record(A,{_G:resource_type,B:_A,C:limits[_A],D:minute_usage,E:_E},priority='high');raise RateLimitExceeded(resource_type=resource_type,limit_type=_A,current_usage=minute_usage,limit_value=limits[_A],retry_after=60-current_time%60)
        hour_cutoff=current_time-3600;self.usage_counters[resource_type][_B]=[t for t in self.usage_counters[resource_type][_B]if t>hour_cutoff];hour_usage=len(self.usage_counters[resource_type][_B]);hour_exceeded=hour_usage>=limits[_B];from vnai.beam.metrics import collector;collector.record(A,{_G:resource_type,B:_B if hour_exceeded else _A,C:limits[_B]if hour_exceeded else limits[_A],D:hour_usage if hour_exceeded else minute_usage,E:hour_exceeded})
        if hour_exceeded:raise RateLimitExceeded(resource_type=resource_type,limit_type=_B,current_usage=hour_usage,limit_value=limits[_B],retry_after=3600-current_time%3600)
        self.usage_counters[resource_type][_A].append(current_time);self.usage_counters[resource_type][_B].append(current_time);return _E
    # usage(): percentage of whichever window (minute or hour) is fuller,
    # after pruning expired timestamps; returns a float in [0, 100+].
    def usage(self,resource_type=_C):current_time=time.time();limits=self.resource_limits.get(resource_type,self.resource_limits[_C]);minute_cutoff=current_time-60;hour_cutoff=current_time-3600;self.usage_counters[resource_type][_A]=[t for t in self.usage_counters[resource_type][_A]if t>minute_cutoff];self.usage_counters[resource_type][_B]=[t for t in self.usage_counters[resource_type][_B]if t>hour_cutoff];minute_usage=len(self.usage_counters[resource_type][_A]);hour_usage=len(self.usage_counters[resource_type][_B]);minute_percentage=minute_usage/limits[_A]*100 if limits[_A]>0 else 0;hour_percentage=hour_usage/limits[_B]*100 if limits[_B]>0 else 0;return max(minute_percentage,hour_percentage)
    # get_limit_status(): read-only snapshot of both windows — usage, limit,
    # percentage, remaining calls, and seconds until each window resets.
    def get_limit_status(self,resource_type=_C):E='reset_in_seconds';D='remaining';C='percentage';B='limit';A='usage';current_time=time.time();limits=self.resource_limits.get(resource_type,self.resource_limits[_C]);minute_cutoff=current_time-60;hour_cutoff=current_time-3600;minute_usage=len([t for t in self.usage_counters[resource_type][_A]if t>minute_cutoff]);hour_usage=len([t for t in self.usage_counters[resource_type][_B]if t>hour_cutoff]);return{_G:resource_type,'minute_limit':{A:minute_usage,B:limits[_A],C:minute_usage/limits[_A]*100 if limits[_A]>0 else 0,D:max(0,limits[_A]-minute_usage),E:60-current_time%60},'hour_limit':{A:hour_usage,B:limits[_B],C:hour_usage/limits[_B]*100 if limits[_B]>0 else 0,D:max(0,limits[_B]-hour_usage),E:3600-current_time%3600}}
# Shared module-level singleton.
guardian=Guardian()
class CleanErrorContext:
    """Context manager that, when a RateLimitExceeded escapes its body, prints
    a throttled warning (at most once per `_message_cooldown` seconds) and
    terminates the process via sys.exit."""
    _last_message_time=0;_message_cooldown=5
    def __enter__(self):return self
    def __exit__(self,exc_type,exc_val,exc_tb):
        if exc_type is RateLimitExceeded:
            current_time=time.time()
            if current_time-CleanErrorContext._last_message_time>=CleanErrorContext._message_cooldown:print(f"\n⚠️ {str(exc_val)}\n");CleanErrorContext._last_message_time=current_time
            # NOTE(review): sys.exit() raises SystemExit from inside __exit__,
            # which supersedes the original RateLimitExceeded; the retry logic
            # in _create_wrapper's `except RateLimitExceeded` therefore appears
            # unreachable, and the trailing `return _F` never executes —
            # confirm whether hard process termination here is intended.
            import sys;sys.exit(f"Rate limit exceeded. {str(exc_val)} Process terminated.");return _F
        return _F
def optimize(resource_type=_C,loop_threshold=10,time_window=5,ad_cooldown=150,content_trigger_threshold=3,max_retries=2,backoff_factor=2,debug=_F):
    """Decorator (usable bare or with arguments) that rate-limits a function,
    detects tight call loops, occasionally presents promotional content, and
    retries with exponential backoff after a rate-limit hit.

    Raises ValueError for out-of-range tuning parameters.
    """
    # Bare usage (@optimize without parentheses) passes the function directly.
    if callable(resource_type):func=resource_type;return _create_wrapper(func,_C,loop_threshold,time_window,ad_cooldown,content_trigger_threshold,max_retries,backoff_factor,debug)
    if loop_threshold<2:raise ValueError(f"loop_threshold must be at least 2, got {loop_threshold}")
    if time_window<=0:raise ValueError(f"time_window must be positive, got {time_window}")
    if content_trigger_threshold<1:raise ValueError(f"content_trigger_threshold must be at least 1, got {content_trigger_threshold}")
    if max_retries<0:raise ValueError(f"max_retries must be non-negative, got {max_retries}")
    if backoff_factor<=0:raise ValueError(f"backoff_factor must be positive, got {backoff_factor}")
    def decorator(func):return _create_wrapper(func,resource_type,loop_threshold,time_window,ad_cooldown,content_trigger_threshold,max_retries,backoff_factor,debug)
    return decorator
def _create_wrapper(func,resource_type,loop_threshold,time_window,ad_cooldown,content_trigger_threshold,max_retries,backoff_factor,debug):
    """Build the rate-limited, loop-detecting wrapper used by optimize().

    Closure state shared across calls of the wrapped function:
    call_history (recent call timestamps within `time_window`), last_ad_time /
    session_displayed (promo throttling, 30-minute sessions), and
    consecutive_loop_detections.
    """
    call_history=[];last_ad_time=0;consecutive_loop_detections=0;session_displayed=_F;session_start_time=time.time();session_timeout=1800
    @functools.wraps(func)
    def wrapper(*args,**kwargs):
        E='timestamp';D='environment';C='error';B='function';A='loop';nonlocal last_ad_time,consecutive_loop_detections,session_displayed,session_start_time;current_time=time.time();content_triggered=_F
        # Reset the promo session after 30 minutes of wall time.
        if current_time-session_start_time>session_timeout:session_displayed=_F;session_start_time=current_time
        retries=0
        # Retry loop: re-entered only after a rate-limit hit with retries left.
        # NOTE(review): current_time is captured once at entry and not refreshed
        # on retries, so each retry re-appends the same timestamp to the history.
        while _E:
            call_history.append(current_time)
            # Prune the history down to the sliding window.
            while call_history and current_time-call_history[0]>time_window:call_history.pop(0)
            loop_detected=len(call_history)>=loop_threshold
            if debug and loop_detected:print(f"[OPTIMIZE] Đã phát hiện vòng lặp cho {func.__name__}: {len(call_history)} lần gọi trong {time_window}s")
            if loop_detected:
                consecutive_loop_detections+=1
                if debug:print(f"[OPTIMIZE] Số lần phát hiện vòng lặp liên tiếp: {consecutive_loop_detections}/{content_trigger_threshold}")
            else:consecutive_loop_detections=0
            # Promo content: at most once per session and per ad_cooldown.
            should_show_content=consecutive_loop_detections>=content_trigger_threshold and current_time-last_ad_time>=ad_cooldown and not session_displayed
            if should_show_content:
                last_ad_time=current_time;consecutive_loop_detections=0;content_triggered=_E;session_displayed=_E
                if debug:print(f"[OPTIMIZE] Đã kích hoạt nội dung cho {func.__name__}")
                try:
                    from vnai.scope.promo import manager
                    try:from vnai.scope.profile import inspector;environment=inspector.examine().get(D,_D);manager.present_content(environment=environment,context=A)
                    except ImportError:manager.present_content(context=A)
                except ImportError:print(f"Phát hiện vòng lặp: Hàm '{func.__name__}' đang được gọi trong một vòng lặp")
                except Exception as e:
                    if debug:print(f"[OPTIMIZE] Lỗi khi hiển thị nội dung: {str(e)}")
            try:
                # May raise RateLimitExceeded; CleanErrorContext prints a
                # message and terminates the process (see note above).
                with CleanErrorContext():guardian.verify(func.__name__,resource_type)
            except RateLimitExceeded as e:
                from vnai.beam.metrics import collector;collector.record(C,{B:func.__name__,C:str(e),'context':'resource_verification',_G:resource_type,'retry_attempt':retries},priority='high')
                if not session_displayed:
                    try:
                        from vnai.scope.promo import manager
                        try:from vnai.scope.profile import inspector;environment=inspector.examine().get(D,_D);manager.present_content(environment=environment,context=A);session_displayed=_E;last_ad_time=current_time
                        except ImportError:manager.present_content(context=A);session_displayed=_E;last_ad_time=current_time
                    except Exception:pass
                if retries<max_retries:
                    # Exponential backoff, capped by the server-suggested delay.
                    wait_time=backoff_factor**retries;retries+=1
                    if hasattr(e,'retry_after')and e.retry_after:wait_time=min(wait_time,e.retry_after)
                    if debug:print(f"[OPTIMIZE] Đã đạt giới hạn tốc độ cho {func.__name__}, thử lại sau {wait_time} giây (lần thử {retries}/{max_retries})")
                    time.sleep(wait_time);continue
                else:raise
            start_time=time.time();success=_F;error=_D
            try:result=func(*args,**kwargs);success=_E;return result
            except Exception as e:error=str(e);raise
            finally:
                execution_time=time.time()-start_time
                # Best-effort telemetry; never let metric failures mask results.
                try:
                    from vnai.beam.metrics import collector;collector.record(B,{B:func.__name__,_G:resource_type,'execution_time':execution_time,'success':success,C:error,'in_loop':loop_detected,'loop_depth':len(call_history),'content_triggered':content_triggered,E:datetime.now().isoformat(),'retry_count':retries if retries>0 else _D})
                    if content_triggered:collector.record('ad_opportunity',{B:func.__name__,_G:resource_type,'call_frequency':len(call_history)/time_window,'consecutive_loops':consecutive_loop_detections,E:datetime.now().isoformat()})
                except ImportError:pass
            break  # NOTE(review): unreachable — the try above returns or raises.
    return wrapper
def rate_limit_status(resource_type=_C):
    """Public helper: expose guardian.get_limit_status for `resource_type`."""
    return guardian.get_limit_status(resource_type)
|
@@ -0,0 +1,55 @@
|
|
1
|
+
# Minifier aliases retained for compatibility with the original module API.
_C='category'
_B=True
_A=None
import time
import threading
import json
from datetime import datetime
from pathlib import Path


class Buffer:
    """Singleton in-memory event buffer, periodically persisted to
    ~/.vnstock/data/buffer_backup.json by a background daemon thread."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Thread-safe lazy singleton construction.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Buffer, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        self.data = []
        # BUGFIX: use a re-entrant lock so that add()/clear(), which call
        # _save_to_backup() while holding the lock, cannot deadlock on the
        # nested acquisition inside _save_to_backup().
        self.lock = threading.RLock()
        self.max_size = 1000          # hard cap on buffered items
        self.backup_interval = 300    # seconds between periodic disk backups
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.backup_path = self.data_dir / 'buffer_backup.json'
        self._load_from_backup()
        self._start_backup_thread()

    def _load_from_backup(self):
        """Restore buffered items from the backup file; any error is ignored."""
        if self.backup_path.exists():
            try:
                with open(self.backup_path, 'r') as f:
                    backup_data = json.load(f)
                with self.lock:
                    self.data = backup_data
            except Exception:
                pass

    def _save_to_backup(self):
        """Persist the current buffer to disk (no-op when empty); best effort."""
        with self.lock:
            if not self.data:
                return
            try:
                with open(self.backup_path, 'w') as f:
                    json.dump(self.data, f)
            except Exception:
                pass

    def _start_backup_thread(self):
        def backup_task():
            while True:
                time.sleep(self.backup_interval)
                self._save_to_backup()

        backup_thread = threading.Thread(target=backup_task, daemon=True)
        backup_thread.start()

    def add(self, item, category=_A):
        """Append `item` to the buffer and return the new buffer length.

        Dict items get a timestamp if missing and, when given, a 'category'
        key. The buffer is trimmed to `max_size` (newest entries kept) and
        checkpointed to disk every 100 items.
        """
        with self.lock:
            if isinstance(item, dict):
                if 'timestamp' not in item:
                    item['timestamp'] = datetime.now().isoformat()
                if category:
                    item[_C] = category
            self.data.append(item)
            if len(self.data) > self.max_size:
                self.data = self.data[-self.max_size:]
            # Opportunistic checkpoint; safe under the re-entrant lock.
            if len(self.data) % 100 == 0:
                self._save_to_backup()
            return len(self.data)

    def get(self, count=_A, category=_A):
        """Return up to `count` buffered items, optionally filtered by category.

        NOTE(review): the category filter assumes every item is a dict;
        non-dict items would raise here — confirm callers only filter dicts.
        """
        with self.lock:
            if category:
                filtered_data = [item for item in self.data if item.get(_C) == category]
            else:
                filtered_data = self.data.copy()
            if count:
                return filtered_data[:count]
            return filtered_data

    def clear(self, category=_A):
        """Remove all items (or only one category), checkpoint, and return
        the remaining item count."""
        with self.lock:
            if category:
                self.data = [item for item in self.data if item.get(_C) != category]
            else:
                self.data = []
            self._save_to_backup()
            return len(self.data)

    def size(self, category=_A):
        """Current item count, optionally restricted to one category."""
        with self.lock:
            if category:
                return len([item for item in self.data if item.get(_C) == category])
            return len(self.data)


# Shared module-level singleton.
buffer = Buffer()
|
@@ -0,0 +1,161 @@
|
|
1
|
+
_U='execution_time'
|
2
|
+
_T='manual'
|
3
|
+
_S='success'
|
4
|
+
_R='is_exceeded'
|
5
|
+
_Q='source'
|
6
|
+
_P='function'
|
7
|
+
_O='last_sync_time'
|
8
|
+
_N='sync_interval'
|
9
|
+
_M='buffer_size'
|
10
|
+
_L='webhook_url'
|
11
|
+
_K='value'
|
12
|
+
_J='sync_count'
|
13
|
+
_I='machine_id'
|
14
|
+
_H='data'
|
15
|
+
_G=False
|
16
|
+
_F=None
|
17
|
+
_E='timestamp'
|
18
|
+
_D='api_requests'
|
19
|
+
_C='rate_limits'
|
20
|
+
_B='function_calls'
|
21
|
+
_A=True
|
22
|
+
import time,threading,json,random,requests
|
23
|
+
from datetime import datetime
|
24
|
+
from pathlib import Path
|
25
|
+
from typing import Dict,List,Any,Optional
|
26
|
+
class Conduit:
|
27
|
+
_instance=_F;_lock=threading.Lock()
|
28
|
+
def __new__(cls,webhook_url=_F,buffer_size=50,sync_interval=300):
|
29
|
+
with cls._lock:
|
30
|
+
if cls._instance is _F:cls._instance=super(Conduit,cls).__new__(cls);cls._instance._initialize(webhook_url,buffer_size,sync_interval)
|
31
|
+
return cls._instance
|
32
|
+
def _initialize(self,webhook_url,buffer_size,sync_interval):
|
33
|
+
self.webhook_url=webhook_url;self.buffer_size=buffer_size;self.sync_interval=sync_interval;self.buffer={_B:[],_D:[],_C:[]};self.lock=threading.Lock();self.last_sync_time=time.time();self.sync_count=0;self.failed_queue=[];self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_A);self.data_dir=self.project_dir/_H;self.data_dir.mkdir(exist_ok=_A);self.config_path=self.data_dir/'relay_config.json'
|
34
|
+
try:from vnai.scope.profile import inspector;self.machine_id=inspector.fingerprint()
|
35
|
+
except:self.machine_id=self._generate_fallback_id()
|
36
|
+
self._load_config();self._start_periodic_sync()
|
37
|
+
def _generate_fallback_id(self)->str:
|
38
|
+
try:import platform,hashlib,uuid;system_info=platform.node()+platform.platform()+platform.processor();return hashlib.md5(system_info.encode()).hexdigest()
|
39
|
+
except:import uuid;return str(uuid.uuid4())
|
40
|
+
def _load_config(self):
|
41
|
+
if self.config_path.exists():
|
42
|
+
try:
|
43
|
+
with open(self.config_path,'r')as f:config=json.load(f)
|
44
|
+
if not self.webhook_url and _L in config:self.webhook_url=config[_L]
|
45
|
+
if _M in config:self.buffer_size=config[_M]
|
46
|
+
if _N in config:self.sync_interval=config[_N]
|
47
|
+
if _O in config:self.last_sync_time=config[_O]
|
48
|
+
if _J in config:self.sync_count=config[_J]
|
49
|
+
except:pass
|
50
|
+
def _save_config(self):
|
51
|
+
config={_L:self.webhook_url,_M:self.buffer_size,_N:self.sync_interval,_O:self.last_sync_time,_J:self.sync_count}
|
52
|
+
try:
|
53
|
+
with open(self.config_path,'w')as f:json.dump(config,f)
|
54
|
+
except:pass
|
55
|
+
def _start_periodic_sync(self):
|
56
|
+
def periodic_sync():
|
57
|
+
while _A:time.sleep(self.sync_interval);self.dispatch('periodic')
|
58
|
+
sync_thread=threading.Thread(target=periodic_sync,daemon=_A);sync_thread.start()
|
59
|
+
def add_function_call(self,record):
|
60
|
+
if not isinstance(record,dict):record={_K:str(record)}
|
61
|
+
with self.lock:self.buffer[_B].append(record);self._check_triggers(_B)
|
62
|
+
def add_api_request(self,record):
|
63
|
+
if not isinstance(record,dict):record={_K:str(record)}
|
64
|
+
with self.lock:self.buffer[_D].append(record);self._check_triggers(_D)
|
65
|
+
def add_rate_limit(self,record):
|
66
|
+
if not isinstance(record,dict):record={_K:str(record)}
|
67
|
+
with self.lock:self.buffer[_C].append(record);self._check_triggers(_C)
|
68
|
+
def _check_triggers(self,record_type:str):
|
69
|
+
current_time=time.time();should_trigger=_G;trigger_reason=_F;total_records=sum(len(buffer)for buffer in self.buffer.values())
|
70
|
+
if total_records>=self.buffer_size:should_trigger=_A;trigger_reason='buffer_full'
|
71
|
+
elif record_type==_C and self.buffer[_C]and any(item.get(_R)for item in self.buffer[_C]if isinstance(item,dict)):should_trigger=_A;trigger_reason='rate_limit_exceeded'
|
72
|
+
elif record_type==_B and self.buffer[_B]and any(not item.get(_S)for item in self.buffer[_B]if isinstance(item,dict)):should_trigger=_A;trigger_reason='function_error'
|
73
|
+
else:
|
74
|
+
time_factor=min(1.,(current_time-self.last_sync_time)/(self.sync_interval/2))
|
75
|
+
if random.random()<.05*time_factor:should_trigger=_A;trigger_reason='random_time_weighted'
|
76
|
+
if should_trigger:threading.Thread(target=self.dispatch,args=(trigger_reason,),daemon=_A).start()
|
77
|
+
def queue(self,package,priority=_F):
    """Normalize an incoming telemetry package and route it into the
    matching buffer (function calls, API requests, or rate limits).

    Returns a truthy value when something was buffered, falsy when the
    package is empty. ``priority='high'`` forces an immediate dispatch.
    NOTE(review): the single-letter locals below are method-local string
    aliases for record-type names and dict keys.
    """
    H='packages';G='commercial';F='system_info';E='rate_limit';D='free';C='system';B='type';A='segment'
    # Best-effort user-segment lookup; fall back to the free segment on any failure.
    try:from vnai.scope.promo import ContentManager;is_paid=ContentManager().is_paid_user;segment_val='paid'if is_paid else D
    except Exception:segment_val=D
    def ensure_segment(d):
        # Shallow-copy d and fill in the segment key if absent;
        # non-dict values pass through unchanged.
        if not isinstance(d,dict):return d
        d=dict(d)
        if A not in d:d[A]=segment_val
        return d
    # Tag both the top-level package and its nested data payload in place.
    if isinstance(package,dict)and A not in package:package[A]=segment_val
    if isinstance(package,dict)and isinstance(package.get(_H),dict):
        if A not in package[_H]:package[_H][A]=segment_val
    if not package:return _G
    # Non-dict packages are stringified and buffered as a generic message.
    if not isinstance(package,dict):self.add_function_call(ensure_segment({'message':str(package)}));return _A
    if _E not in package:package[_E]=datetime.now().isoformat()
    if B in package:
        package_type=package[B];data=package.get(_H,{})
        # Hoist the machine id out of the nested system block, then drop the block.
        if isinstance(data,dict)and C in data:
            machine_id=data[C].get(_I);data.pop(C)
            if machine_id:data[_I]=machine_id
        if package_type==_P:self.add_function_call(ensure_segment(data))
        elif package_type=='api_request':self.add_api_request(ensure_segment(data))
        elif package_type==E:self.add_rate_limit(ensure_segment(data))
        elif package_type==F:self.add_function_call({B:F,G:data.get(G),H:data.get(H),_E:package.get(_E)})
        elif package_type=='metrics':
            # A metrics package fans out lists of records keyed by metric type.
            metrics_data=data
            for(metric_type,metrics_list)in metrics_data.items():
                if isinstance(metrics_list,list):
                    if metric_type==_P:
                        for item in metrics_list:self.add_function_call(ensure_segment(item))
                    elif metric_type==E:
                        for item in metrics_list:self.add_rate_limit(ensure_segment(item))
                    elif metric_type=='request':
                        for item in metrics_list:self.add_api_request(ensure_segment(item))
        # Unknown type: buffer the data payload if it is a distinct dict,
        # otherwise buffer the whole package.
        elif isinstance(data,dict)and data is not package:self.add_function_call(ensure_segment(data))
        else:self.add_function_call(ensure_segment(package))
    else:self.add_function_call(ensure_segment(package))
    if priority=='high':self.dispatch('high_priority')
    return _A
def dispatch(self,reason=_T):
    """Flush all buffered records to the configured webhook.

    Atomically snapshots and clears the buffers under the lock, attaches
    environment metadata, and posts the payload. A failed send is kept
    in ``failed_queue`` (capped at the 10 most recent) for later retry.

    Returns True on successful delivery, falsy when there is nothing to
    send, no webhook is configured, or the send failed.
    """
    if not self.webhook_url:return _G
    with self.lock:
        if all(len(records)==0 for records in self.buffer.values()):return _G
        # Snapshot + reset inside the lock so concurrent adders never lose records.
        data_to_send={_B:self.buffer[_B].copy(),_D:self.buffer[_D].copy(),_C:self.buffer[_C].copy()}
        self.buffer={_B:[],_D:[],_C:[]}
        self.last_sync_time=time.time()
        self.sync_count+=1
        self._save_config()
    try:
        from vnai.scope.profile import inspector
        environment_info=inspector.examine()
        machine_id=environment_info.get(_I,self.machine_id)
    except Exception:
        # Narrowed from a bare `except:`; fall back to the cached machine id.
        environment_info={_I:self.machine_id}
        machine_id=self.machine_id
    payload={'analytics_data':data_to_send,'metadata':{_E:datetime.now().isoformat(),_I:machine_id,_J:self.sync_count,'trigger_reason':reason,'environment':environment_info,'data_counts':{_B:len(data_to_send[_B]),_D:len(data_to_send[_D]),_C:len(data_to_send[_C])}}}
    success=self._send_data(payload)
    if not success:
        with self.lock:
            self.failed_queue.append(payload)
            if len(self.failed_queue)>10:self.failed_queue=self.failed_queue[-10:]
    return success
def _send_data(self,payload):
    """POST ``payload`` as JSON to the webhook with a 5s timeout.

    Returns True only on HTTP 200; any network/serialization error or a
    missing webhook URL yields a falsy result (never raises).
    """
    if not self.webhook_url:return _G
    try:
        response=requests.post(self.webhook_url,json=payload,timeout=5)
        return response.status_code==200
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; request failures still return falsy.
        return _G
def retry_failed(self):
    """Resend payloads that previously failed to deliver.

    Drains the failed queue under the lock, retries each payload, and
    re-queues the ones that fail again. Returns how many were delivered.
    """
    if not self.failed_queue:return 0
    with self.lock:
        pending=self.failed_queue.copy()
        self.failed_queue=[]
    delivered=0
    for payload in pending:
        if self._send_data(payload):
            delivered+=1
        else:
            with self.lock:
                self.failed_queue.append(payload)
    return delivered
def configure(self,webhook_url):
    """Atomically set the webhook URL, persist it, and report success."""
    with self.lock:
        self.webhook_url=webhook_url
        self._save_config()
        return _A
# Module-level singleton through which all the convenience functions route.
conduit=Conduit()
def track_function_call(function_name,source,execution_time,success=_A,error=_F,args=_F):
    """Record one tracked function invocation into the shared conduit.

    ``error`` is attached only when truthy. ``args`` values are sanitized:
    scalar values (str/int/float/bool) are kept, anything else is replaced
    by its type name; non-dict ``args`` are stringified under a fallback key.
    """
    record={_P:function_name,_Q:source,_U:execution_time,_E:datetime.now().isoformat(),_S:success}
    if error:
        record['error']=error
    if args:
        if isinstance(args,dict):
            safe_args={k:(v if isinstance(v,(str,int,float,bool))else str(type(v)))for(k,v)in args.items()}
        else:
            safe_args={_K:str(args)}
        record['args']=safe_args
    conduit.add_function_call(record)
def track_rate_limit(source,limit_type,limit_value,current_usage,is_exceeded):
    """Record one rate-limit observation into the shared conduit."""
    usage_pct=current_usage/limit_value*100 if limit_value>0 else 0
    record={_Q:source,'limit_type':limit_type,'limit_value':limit_value,'current_usage':current_usage,_R:is_exceeded,_E:datetime.now().isoformat(),'usage_percentage':usage_pct}
    conduit.add_rate_limit(record)
def track_api_request(endpoint,source,method,status_code,execution_time,request_size=0,response_size=0):
    """Record one API request (endpoint, outcome, timing, sizes) into the shared conduit."""
    record={'endpoint':endpoint,_Q:source,'method':method,'status_code':status_code,_U:execution_time,_E:datetime.now().isoformat(),'request_size':request_size,'response_size':response_size}
    conduit.add_api_request(record)
def configure(webhook_url):
    """Module-level convenience wrapper around ``conduit.configure``."""
    return conduit.configure(webhook_url)
def sync_now():
    """Force an immediate flush of all buffered records."""
    return conduit.dispatch(_T)
def retry_failed():
    """Retry delivery of previously failed payloads; returns the count delivered."""
    return conduit.retry_failed()