vnai 2.1.1__py3-none-any.whl → 2.1.3__py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
vnai/flow/queue.py CHANGED
@@ -1,55 +1,55 @@
- _C='category'
- _B=True
- _A=None
- import time,threading,json
- from datetime import datetime
- from pathlib import Path
- class Buffer:
- _instance=_A;_lock=threading.Lock()
- def __new__(cls):
- with cls._lock:
- if cls._instance is _A:cls._instance=super(Buffer,cls).__new__(cls);cls._instance._initialize()
- return cls._instance
- def _initialize(self):self.data=[];self.lock=threading.Lock();self.max_size=1000;self.backup_interval=300;self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_B);self.data_dir=self.project_dir/'data';self.data_dir.mkdir(exist_ok=_B);self.backup_path=self.data_dir/'buffer_backup.json';self._load_from_backup();self._start_backup_thread()
- def _load_from_backup(self):
- if self.backup_path.exists():
- try:
- with open(self.backup_path,'r')as f:backup_data=json.load(f)
- with self.lock:self.data=backup_data
- except:pass
- def _save_to_backup(self):
- with self.lock:
- if not self.data:return
- try:
- with open(self.backup_path,'w')as f:json.dump(self.data,f)
- except:pass
- def _start_backup_thread(self):
- def backup_task():
- while _B:time.sleep(self.backup_interval);self._save_to_backup()
- backup_thread=threading.Thread(target=backup_task,daemon=_B);backup_thread.start()
- def add(self,item,category=_A):
- A='timestamp'
- with self.lock:
- if isinstance(item,dict):
- if A not in item:item[A]=datetime.now().isoformat()
- if category:item[_C]=category
- self.data.append(item)
- if len(self.data)>self.max_size:self.data=self.data[-self.max_size:]
- if len(self.data)%100==0:self._save_to_backup()
- return len(self.data)
- def get(self,count=_A,category=_A):
- with self.lock:
- if category:filtered_data=[item for item in self.data if item.get(_C)==category]
- else:filtered_data=self.data.copy()
- if count:return filtered_data[:count]
- else:return filtered_data
- def clear(self,category=_A):
- with self.lock:
- if category:self.data=[item for item in self.data if item.get(_C)!=category]
- else:self.data=[]
- self._save_to_backup();return len(self.data)
- def size(self,category=_A):
- with self.lock:
- if category:return len([item for item in self.data if item.get(_C)==category])
- else:return len(self.data)
+ _C='category'
+ _B=True
+ _A=None
+ import time,threading,json
+ from datetime import datetime
+ from pathlib import Path
+ class Buffer:
+ _instance=_A;_lock=threading.Lock()
+ def __new__(cls):
+ with cls._lock:
+ if cls._instance is _A:cls._instance=super(Buffer,cls).__new__(cls);cls._instance._initialize()
+ return cls._instance
+ def _initialize(self):self.data=[];self.lock=threading.Lock();self.max_size=1000;self.backup_interval=300;self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_B);self.data_dir=self.project_dir/'data';self.data_dir.mkdir(exist_ok=_B);self.backup_path=self.data_dir/'buffer_backup.json';self._load_from_backup();self._start_backup_thread()
+ def _load_from_backup(self):
+ if self.backup_path.exists():
+ try:
+ with open(self.backup_path,'r')as f:backup_data=json.load(f)
+ with self.lock:self.data=backup_data
+ except:pass
+ def _save_to_backup(self):
+ with self.lock:
+ if not self.data:return
+ try:
+ with open(self.backup_path,'w')as f:json.dump(self.data,f)
+ except:pass
+ def _start_backup_thread(self):
+ def backup_task():
+ while _B:time.sleep(self.backup_interval);self._save_to_backup()
+ backup_thread=threading.Thread(target=backup_task,daemon=_B);backup_thread.start()
+ def add(self,item,category=_A):
+ A='timestamp'
+ with self.lock:
+ if isinstance(item,dict):
+ if A not in item:item[A]=datetime.now().isoformat()
+ if category:item[_C]=category
+ self.data.append(item)
+ if len(self.data)>self.max_size:self.data=self.data[-self.max_size:]
+ if len(self.data)%100==0:self._save_to_backup()
+ return len(self.data)
+ def get(self,count=_A,category=_A):
+ with self.lock:
+ if category:filtered_data=[item for item in self.data if item.get(_C)==category]
+ else:filtered_data=self.data.copy()
+ if count:return filtered_data[:count]
+ else:return filtered_data
+ def clear(self,category=_A):
+ with self.lock:
+ if category:self.data=[item for item in self.data if item.get(_C)!=category]
+ else:self.data=[]
+ self._save_to_backup();return len(self.data)
+ def size(self,category=_A):
+ with self.lock:
+ if category:return len([item for item in self.data if item.get(_C)==category])
+ else:return len(self.data)
  buffer=Buffer()
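For orientation, the Buffer class above is a thread-safe singleton that accumulates dict records in memory, caps the list at max_size=1000, and periodically mirrors it to ~/.vnstock/data/buffer_backup.json from a daemon thread. Below is a minimal usage sketch, assuming the vnai.flow.queue import path implied by the file location; the "demo" category is an arbitrary example value. Importing the module creates the singleton and starts the backup thread.

# Minimal sketch of the Buffer API shown in the hunk above (import path assumed).
from vnai.flow.queue import buffer

# add() stamps dict items with an ISO timestamp and an optional category,
# then returns the current buffer length (trimmed to the newest 1000 items).
count = buffer.add({"event": "demo"}, category="demo")

# get() returns a copy of the buffered items, optionally filtered by category
# and truncated to the first `count` entries.
recent = buffer.get(count=10, category="demo")

# size() accepts the same optional category filter; clear() drops matching items.
print(buffer.size("demo"), len(recent), count)
buffer.clear(category="demo")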
vnai/flow/relay.py CHANGED
@@ -1,161 +1,161 @@
- _U='execution_time'
- _T='manual'
- _S='success'
- _R='is_exceeded'
- _Q='source'
- _P='function'
- _O='last_sync_time'
- _N='sync_interval'
- _M='buffer_size'
- _L='webhook_url'
- _K='value'
- _J='sync_count'
- _I='machine_id'
- _H='data'
- _G=False
- _F=None
- _E='timestamp'
- _D='api_requests'
- _C='rate_limits'
- _B='function_calls'
- _A=True
- import time,threading,json,random,requests
- from datetime import datetime
- from pathlib import Path
- from typing import Dict,List,Any,Optional
- class Conduit:
- _instance=_F;_lock=threading.Lock()
- def __new__(cls,webhook_url=_F,buffer_size=50,sync_interval=300):
- with cls._lock:
- if cls._instance is _F:cls._instance=super(Conduit,cls).__new__(cls);cls._instance._initialize(webhook_url,buffer_size,sync_interval)
- return cls._instance
- def _initialize(self,webhook_url,buffer_size,sync_interval):
- self.webhook_url=webhook_url;self.buffer_size=buffer_size;self.sync_interval=sync_interval;self.buffer={_B:[],_D:[],_C:[]};self.lock=threading.Lock();self.last_sync_time=time.time();self.sync_count=0;self.failed_queue=[];self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_A);self.data_dir=self.project_dir/_H;self.data_dir.mkdir(exist_ok=_A);self.config_path=self.data_dir/'relay_config.json'
- try:from vnai.scope.profile import inspector;self.machine_id=inspector.fingerprint()
- except:self.machine_id=self._generate_fallback_id()
- self._load_config();self._start_periodic_sync()
- def _generate_fallback_id(self)->str:
- try:import platform,hashlib,uuid;system_info=platform.node()+platform.platform()+platform.processor();return hashlib.md5(system_info.encode()).hexdigest()
- except:import uuid;return str(uuid.uuid4())
- def _load_config(self):
- if self.config_path.exists():
- try:
- with open(self.config_path,'r')as f:config=json.load(f)
- if not self.webhook_url and _L in config:self.webhook_url=config[_L]
- if _M in config:self.buffer_size=config[_M]
- if _N in config:self.sync_interval=config[_N]
- if _O in config:self.last_sync_time=config[_O]
- if _J in config:self.sync_count=config[_J]
- except:pass
- def _save_config(self):
- config={_L:self.webhook_url,_M:self.buffer_size,_N:self.sync_interval,_O:self.last_sync_time,_J:self.sync_count}
- try:
- with open(self.config_path,'w')as f:json.dump(config,f)
- except:pass
- def _start_periodic_sync(self):
- def periodic_sync():
- while _A:time.sleep(self.sync_interval);self.dispatch('periodic')
- sync_thread=threading.Thread(target=periodic_sync,daemon=_A);sync_thread.start()
- def add_function_call(self,record):
- if not isinstance(record,dict):record={_K:str(record)}
- with self.lock:self.buffer[_B].append(record);self._check_triggers(_B)
- def add_api_request(self,record):
- if not isinstance(record,dict):record={_K:str(record)}
- with self.lock:self.buffer[_D].append(record);self._check_triggers(_D)
- def add_rate_limit(self,record):
- if not isinstance(record,dict):record={_K:str(record)}
- with self.lock:self.buffer[_C].append(record);self._check_triggers(_C)
- def _check_triggers(self,record_type:str):
- current_time=time.time();should_trigger=_G;trigger_reason=_F;total_records=sum(len(buffer)for buffer in self.buffer.values())
- if total_records>=self.buffer_size:should_trigger=_A;trigger_reason='buffer_full'
- elif record_type==_C and self.buffer[_C]and any(item.get(_R)for item in self.buffer[_C]if isinstance(item,dict)):should_trigger=_A;trigger_reason='rate_limit_exceeded'
- elif record_type==_B and self.buffer[_B]and any(not item.get(_S)for item in self.buffer[_B]if isinstance(item,dict)):should_trigger=_A;trigger_reason='function_error'
- else:
- time_factor=min(1.,(current_time-self.last_sync_time)/(self.sync_interval/2))
- if random.random()<.05*time_factor:should_trigger=_A;trigger_reason='random_time_weighted'
- if should_trigger:threading.Thread(target=self.dispatch,args=(trigger_reason,),daemon=_A).start()
- def queue(self,package,priority=_F):
- H='packages';G='commercial';F='system_info';E='rate_limit';D='free';C='system';B='type';A='segment'
- try:from vnai.scope.promo import ContentManager;is_paid=ContentManager().is_paid_user;segment_val='paid'if is_paid else D
- except Exception:segment_val=D
- def ensure_segment(d):
- if not isinstance(d,dict):return d
- d=dict(d)
- if A not in d:d[A]=segment_val
- return d
- if isinstance(package,dict)and A not in package:package[A]=segment_val
- if isinstance(package,dict)and isinstance(package.get(_H),dict):
- if A not in package[_H]:package[_H][A]=segment_val
- if not package:return _G
- if not isinstance(package,dict):self.add_function_call(ensure_segment({'message':str(package)}));return _A
- if _E not in package:package[_E]=datetime.now().isoformat()
- if B in package:
- package_type=package[B];data=package.get(_H,{})
- if isinstance(data,dict)and C in data:
- machine_id=data[C].get(_I);data.pop(C)
- if machine_id:data[_I]=machine_id
- if package_type==_P:self.add_function_call(ensure_segment(data))
- elif package_type=='api_request':self.add_api_request(ensure_segment(data))
- elif package_type==E:self.add_rate_limit(ensure_segment(data))
- elif package_type==F:self.add_function_call({B:F,G:data.get(G),H:data.get(H),_E:package.get(_E)})
- elif package_type=='metrics':
- metrics_data=data
- for(metric_type,metrics_list)in metrics_data.items():
- if isinstance(metrics_list,list):
- if metric_type==_P:
- for item in metrics_list:self.add_function_call(ensure_segment(item))
- elif metric_type==E:
- for item in metrics_list:self.add_rate_limit(ensure_segment(item))
- elif metric_type=='request':
- for item in metrics_list:self.add_api_request(ensure_segment(item))
- elif isinstance(data,dict)and data is not package:self.add_function_call(ensure_segment(data))
- else:self.add_function_call(ensure_segment(package))
- else:self.add_function_call(ensure_segment(package))
- if priority=='high':self.dispatch('high_priority')
- return _A
- def dispatch(self,reason=_T):
- if not self.webhook_url:return _G
- with self.lock:
- if all(len(records)==0 for records in self.buffer.values()):return _G
- data_to_send={_B:self.buffer[_B].copy(),_D:self.buffer[_D].copy(),_C:self.buffer[_C].copy()};self.buffer={_B:[],_D:[],_C:[]};self.last_sync_time=time.time();self.sync_count+=1;self._save_config()
- try:from vnai.scope.profile import inspector;environment_info=inspector.examine();machine_id=environment_info.get(_I,self.machine_id)
- except:environment_info={_I:self.machine_id};machine_id=self.machine_id
- payload={'analytics_data':data_to_send,'metadata':{_E:datetime.now().isoformat(),_I:machine_id,_J:self.sync_count,'trigger_reason':reason,'environment':environment_info,'data_counts':{_B:len(data_to_send[_B]),_D:len(data_to_send[_D]),_C:len(data_to_send[_C])}}};success=self._send_data(payload)
- if not success:
- with self.lock:
- self.failed_queue.append(payload)
- if len(self.failed_queue)>10:self.failed_queue=self.failed_queue[-10:]
- return success
- def _send_data(self,payload):
- if not self.webhook_url:return _G
- try:response=requests.post(self.webhook_url,json=payload,timeout=5);return response.status_code==200
- except:return _G
- def retry_failed(self):
- if not self.failed_queue:return 0
- with self.lock:to_retry=self.failed_queue.copy();self.failed_queue=[]
- success_count=0
- for payload in to_retry:
- if self._send_data(payload):success_count+=1
- else:
- with self.lock:self.failed_queue.append(payload)
- return success_count
- def configure(self,webhook_url):
- with self.lock:self.webhook_url=webhook_url;self._save_config();return _A
- conduit=Conduit()
- def track_function_call(function_name,source,execution_time,success=_A,error=_F,args=_F):
- record={_P:function_name,_Q:source,_U:execution_time,_E:datetime.now().isoformat(),_S:success}
- if error:record['error']=error
- if args:
- sanitized_args={}
- if isinstance(args,dict):
- for(key,value)in args.items():
- if isinstance(value,(str,int,float,bool)):sanitized_args[key]=value
- else:sanitized_args[key]=str(type(value))
- else:sanitized_args={_K:str(args)}
- record['args']=sanitized_args
- conduit.add_function_call(record)
- def track_rate_limit(source,limit_type,limit_value,current_usage,is_exceeded):record={_Q:source,'limit_type':limit_type,'limit_value':limit_value,'current_usage':current_usage,_R:is_exceeded,_E:datetime.now().isoformat(),'usage_percentage':current_usage/limit_value*100 if limit_value>0 else 0};conduit.add_rate_limit(record)
- def track_api_request(endpoint,source,method,status_code,execution_time,request_size=0,response_size=0):record={'endpoint':endpoint,_Q:source,'method':method,'status_code':status_code,_U:execution_time,_E:datetime.now().isoformat(),'request_size':request_size,'response_size':response_size};conduit.add_api_request(record)
- def configure(webhook_url):return conduit.configure(webhook_url)
- def sync_now():return conduit.dispatch(_T)
+ _U='execution_time'
+ _T='manual'
+ _S='success'
+ _R='is_exceeded'
+ _Q='source'
+ _P='function'
+ _O='last_sync_time'
+ _N='sync_interval'
+ _M='buffer_size'
+ _L='webhook_url'
+ _K='value'
+ _J='sync_count'
+ _I='machine_id'
+ _H='data'
+ _G=False
+ _F=None
+ _E='timestamp'
+ _D='api_requests'
+ _C='rate_limits'
+ _B='function_calls'
+ _A=True
+ import time,threading,json,random,requests
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Dict,List,Any,Optional
+ class Conduit:
+ _instance=_F;_lock=threading.Lock()
+ def __new__(cls,webhook_url=_F,buffer_size=50,sync_interval=300):
+ with cls._lock:
+ if cls._instance is _F:cls._instance=super(Conduit,cls).__new__(cls);cls._instance._initialize(webhook_url,buffer_size,sync_interval)
+ return cls._instance
+ def _initialize(self,webhook_url,buffer_size,sync_interval):
+ self.webhook_url=webhook_url;self.buffer_size=buffer_size;self.sync_interval=sync_interval;self.buffer={_B:[],_D:[],_C:[]};self.lock=threading.Lock();self.last_sync_time=time.time();self.sync_count=0;self.failed_queue=[];self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_A);self.data_dir=self.project_dir/_H;self.data_dir.mkdir(exist_ok=_A);self.config_path=self.data_dir/'relay_config.json'
+ try:from vnai.scope.profile import inspector;self.machine_id=inspector.fingerprint()
+ except:self.machine_id=self._generate_fallback_id()
+ self._load_config();self._start_periodic_sync()
+ def _generate_fallback_id(self)->str:
+ try:import platform,hashlib,uuid;system_info=platform.node()+platform.platform()+platform.processor();return hashlib.md5(system_info.encode()).hexdigest()
+ except:import uuid;return str(uuid.uuid4())
+ def _load_config(self):
+ if self.config_path.exists():
+ try:
+ with open(self.config_path,'r')as f:config=json.load(f)
+ if not self.webhook_url and _L in config:self.webhook_url=config[_L]
+ if _M in config:self.buffer_size=config[_M]
+ if _N in config:self.sync_interval=config[_N]
+ if _O in config:self.last_sync_time=config[_O]
+ if _J in config:self.sync_count=config[_J]
+ except:pass
+ def _save_config(self):
+ config={_L:self.webhook_url,_M:self.buffer_size,_N:self.sync_interval,_O:self.last_sync_time,_J:self.sync_count}
+ try:
+ with open(self.config_path,'w')as f:json.dump(config,f)
+ except:pass
+ def _start_periodic_sync(self):
+ def periodic_sync():
+ while _A:time.sleep(self.sync_interval);self.dispatch('periodic')
+ sync_thread=threading.Thread(target=periodic_sync,daemon=_A);sync_thread.start()
+ def add_function_call(self,record):
+ if not isinstance(record,dict):record={_K:str(record)}
+ with self.lock:self.buffer[_B].append(record);self._check_triggers(_B)
+ def add_api_request(self,record):
+ if not isinstance(record,dict):record={_K:str(record)}
+ with self.lock:self.buffer[_D].append(record);self._check_triggers(_D)
+ def add_rate_limit(self,record):
+ if not isinstance(record,dict):record={_K:str(record)}
+ with self.lock:self.buffer[_C].append(record);self._check_triggers(_C)
+ def _check_triggers(self,record_type:str):
+ current_time=time.time();should_trigger=_G;trigger_reason=_F;total_records=sum(len(buffer)for buffer in self.buffer.values())
+ if total_records>=self.buffer_size:should_trigger=_A;trigger_reason='buffer_full'
+ elif record_type==_C and self.buffer[_C]and any(item.get(_R)for item in self.buffer[_C]if isinstance(item,dict)):should_trigger=_A;trigger_reason='rate_limit_exceeded'
+ elif record_type==_B and self.buffer[_B]and any(not item.get(_S)for item in self.buffer[_B]if isinstance(item,dict)):should_trigger=_A;trigger_reason='function_error'
+ else:
+ time_factor=min(1.,(current_time-self.last_sync_time)/(self.sync_interval/2))
+ if random.random()<.05*time_factor:should_trigger=_A;trigger_reason='random_time_weighted'
+ if should_trigger:threading.Thread(target=self.dispatch,args=(trigger_reason,),daemon=_A).start()
+ def queue(self,package,priority=_F):
+ H='packages';G='commercial';F='system_info';E='rate_limit';D='free';C='system';B='type';A='segment'
+ try:from vnai.scope.promo import ContentManager;is_paid=ContentManager().is_paid_user;segment_val='paid'if is_paid else D
+ except Exception:segment_val=D
+ def ensure_segment(d):
+ if not isinstance(d,dict):return d
+ d=dict(d)
+ if A not in d:d[A]=segment_val
+ return d
+ if isinstance(package,dict)and A not in package:package[A]=segment_val
+ if isinstance(package,dict)and isinstance(package.get(_H),dict):
+ if A not in package[_H]:package[_H][A]=segment_val
+ if not package:return _G
+ if not isinstance(package,dict):self.add_function_call(ensure_segment({'message':str(package)}));return _A
+ if _E not in package:package[_E]=datetime.now().isoformat()
+ if B in package:
+ package_type=package[B];data=package.get(_H,{})
+ if isinstance(data,dict)and C in data:
+ machine_id=data[C].get(_I);data.pop(C)
+ if machine_id:data[_I]=machine_id
+ if package_type==_P:self.add_function_call(ensure_segment(data))
+ elif package_type=='api_request':self.add_api_request(ensure_segment(data))
+ elif package_type==E:self.add_rate_limit(ensure_segment(data))
+ elif package_type==F:self.add_function_call({B:F,G:data.get(G),H:data.get(H),_E:package.get(_E)})
+ elif package_type=='metrics':
+ metrics_data=data
+ for(metric_type,metrics_list)in metrics_data.items():
+ if isinstance(metrics_list,list):
+ if metric_type==_P:
+ for item in metrics_list:self.add_function_call(ensure_segment(item))
+ elif metric_type==E:
+ for item in metrics_list:self.add_rate_limit(ensure_segment(item))
+ elif metric_type=='request':
+ for item in metrics_list:self.add_api_request(ensure_segment(item))
+ elif isinstance(data,dict)and data is not package:self.add_function_call(ensure_segment(data))
+ else:self.add_function_call(ensure_segment(package))
+ else:self.add_function_call(ensure_segment(package))
+ if priority=='high':self.dispatch('high_priority')
+ return _A
+ def dispatch(self,reason=_T):
+ if not self.webhook_url:return _G
+ with self.lock:
+ if all(len(records)==0 for records in self.buffer.values()):return _G
+ data_to_send={_B:self.buffer[_B].copy(),_D:self.buffer[_D].copy(),_C:self.buffer[_C].copy()};self.buffer={_B:[],_D:[],_C:[]};self.last_sync_time=time.time();self.sync_count+=1;self._save_config()
+ try:from vnai.scope.profile import inspector;environment_info=inspector.examine();machine_id=environment_info.get(_I,self.machine_id)
+ except:environment_info={_I:self.machine_id};machine_id=self.machine_id
+ payload={'analytics_data':data_to_send,'metadata':{_E:datetime.now().isoformat(),_I:machine_id,_J:self.sync_count,'trigger_reason':reason,'environment':environment_info,'data_counts':{_B:len(data_to_send[_B]),_D:len(data_to_send[_D]),_C:len(data_to_send[_C])}}};success=self._send_data(payload)
+ if not success:
+ with self.lock:
+ self.failed_queue.append(payload)
+ if len(self.failed_queue)>10:self.failed_queue=self.failed_queue[-10:]
+ return success
+ def _send_data(self,payload):
+ if not self.webhook_url:return _G
+ try:response=requests.post(self.webhook_url,json=payload,timeout=5);return response.status_code==200
+ except:return _G
+ def retry_failed(self):
+ if not self.failed_queue:return 0
+ with self.lock:to_retry=self.failed_queue.copy();self.failed_queue=[]
+ success_count=0
+ for payload in to_retry:
+ if self._send_data(payload):success_count+=1
+ else:
+ with self.lock:self.failed_queue.append(payload)
+ return success_count
+ def configure(self,webhook_url):
+ with self.lock:self.webhook_url=webhook_url;self._save_config();return _A
+ conduit=Conduit()
+ def track_function_call(function_name,source,execution_time,success=_A,error=_F,args=_F):
+ record={_P:function_name,_Q:source,_U:execution_time,_E:datetime.now().isoformat(),_S:success}
+ if error:record['error']=error
+ if args:
+ sanitized_args={}
+ if isinstance(args,dict):
+ for(key,value)in args.items():
+ if isinstance(value,(str,int,float,bool)):sanitized_args[key]=value
+ else:sanitized_args[key]=str(type(value))
+ else:sanitized_args={_K:str(args)}
+ record['args']=sanitized_args
+ conduit.add_function_call(record)
+ def track_rate_limit(source,limit_type,limit_value,current_usage,is_exceeded):record={_Q:source,'limit_type':limit_type,'limit_value':limit_value,'current_usage':current_usage,_R:is_exceeded,_E:datetime.now().isoformat(),'usage_percentage':current_usage/limit_value*100 if limit_value>0 else 0};conduit.add_rate_limit(record)
+ def track_api_request(endpoint,source,method,status_code,execution_time,request_size=0,response_size=0):record={'endpoint':endpoint,_Q:source,'method':method,'status_code':status_code,_U:execution_time,_E:datetime.now().isoformat(),'request_size':request_size,'response_size':response_size};conduit.add_api_request(record)
+ def configure(webhook_url):return conduit.configure(webhook_url)
+ def sync_now():return conduit.dispatch(_T)
  def retry_failed():return conduit.retry_failed()
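relay.py keeps its public surface at the bottom of the file: a Conduit singleton plus thin module-level wrappers (configure, track_function_call, track_api_request, track_rate_limit, sync_now, retry_failed). Below is a hedged sketch of how those wrappers compose; the endpoint and argument values are placeholders for illustration, not anything shipped with the package.

# Sketch of the module-level helpers defined at the end of relay.py (values are placeholders).
from vnai.flow import relay

# configure() stores the webhook URL and persists it to ~/.vnstock/data/relay_config.json;
# without a URL, dispatch() returns False immediately.
relay.configure("https://example.com/hook")  # placeholder URL

# The track_* helpers build a timestamped record and append it to the matching buffer.
relay.track_function_call("quote_history", source="demo", execution_time=0.12, success=True)
relay.track_api_request("/stock/quote", source="demo", method="GET",
                        status_code=200, execution_time=0.05)
relay.track_rate_limit(source="demo", limit_type="per_minute", limit_value=60,
                       current_usage=10, is_exceeded=False)

# sync_now() forces a 'manual' dispatch: it drains the buffers, POSTs the payload,
# and returns True only on an HTTP 200; failed payloads land in a bounded retry queue.
ok = relay.sync_now()
if not ok:
    relay.retry_failed()  # re-attempts the queued payloads (the queue keeps at most 10)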
vnai/scope/__init__.py CHANGED
@@ -1,4 +1,4 @@
- from vnai.scope.profile import inspector
- from vnai.scope.state import tracker,record
- from vnai.scope.promo import manager as content_manager
+ from vnai.scope.profile import inspector
+ from vnai.scope.state import tracker,record
+ from vnai.scope.promo import manager as content_manager
  from vnai.scope.promo import present as present_content
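vnai/scope/__init__.py only re-exports the sub-module entry points, so downstream code can pull them straight from the package, for example:

# Equivalent, from the consumer's side, to the four import lines above.
from vnai.scope import inspector, tracker, record, content_manager, present_content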