vnai-2.0.7-py3-none-any.whl → vnai-2.0.9-py3-none-any.whl

This diff shows the contents of publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in the public registry.
vnai/scope/state.py CHANGED
@@ -1,223 +1,74 @@
- # vnai/scope/state.py
- # System state tracking
-
- import time
- import threading
- import json
- import os
+ _L='minimal'
+ _K='warnings'
+ _J='api_requests'
+ _I='last_error_time'
+ _H='startup_time'
+ _G='standard'
+ _F='function_calls'
+ _E='peak_memory'
+ _D='errors'
+ _C=True
+ _B=None
+ _A='execution_times'
+ import time,threading,json,os
  from datetime import datetime
  from pathlib import Path
-
  class Tracker:
-     """Tracks system state and performance metrics"""
-
-     _instance = None
-     _lock = threading.Lock()
-
-     def __new__(cls):
-         with cls._lock:
-             if cls._instance is None:
-                 cls._instance = super(Tracker, cls).__new__(cls)
-                 cls._instance._initialize()
-             return cls._instance
-
-     def _initialize(self):
-         """Initialize tracker"""
-         self.metrics = {
-             "startup_time": datetime.now().isoformat(),
-             "function_calls": 0,
-             "api_requests": 0,
-             "errors": 0,
-             "warnings": 0
-         }
-
-         self.performance_metrics = {
-             "execution_times": [],
-             "last_error_time": None,
-             "peak_memory": 0
-         }
-
-         self.privacy_level = "standard"
-
-         # Setup data directory
-         self.home_dir = Path.home()
-         self.project_dir = self.home_dir / ".vnstock"
-         self.project_dir.mkdir(exist_ok=True)
-         self.data_dir = self.project_dir / 'data'
-         self.data_dir.mkdir(exist_ok=True)
-         self.metrics_path = self.data_dir / "usage_metrics.json"
-         self.privacy_config_path = self.project_dir / 'config' / "privacy.json"
-
-         # Create config directory if it doesn't exist
-         os.makedirs(os.path.dirname(self.privacy_config_path), exist_ok=True)
-
-         # Load existing metrics
-         self._load_metrics()
-
-         # Load privacy settings
-         self._load_privacy_settings()
-
-         # Start background metrics collector
-         self._start_background_collector()
-
-     def _load_metrics(self):
-         """Load metrics from file"""
-         if self.metrics_path.exists():
-             try:
-                 with open(self.metrics_path, 'r') as f:
-                     stored_metrics = json.load(f)
-
-                 # Update metrics with stored values
-                 for key, value in stored_metrics.items():
-                     if key in self.metrics:
-                         self.metrics[key] = value
-             except:
-                 pass
-
-     def _save_metrics(self):
-         """Save metrics to file"""
-         try:
-             with open(self.metrics_path, 'w') as f:
-                 json.dump(self.metrics, f)
-         except:
-             pass
-
-     def _load_privacy_settings(self):
-         """Load privacy settings"""
-         if self.privacy_config_path.exists():
-             try:
-                 with open(self.privacy_config_path, 'r') as f:
-                     settings = json.load(f)
-                     self.privacy_level = settings.get("level", "standard")
-             except:
-                 pass
-
-     def setup_privacy(self, level=None):
-         """Configure privacy level for data collection"""
-         privacy_levels = {
-             "minimal": "Essential system data only",
-             "standard": "Performance metrics and errors",
-             "enhanced": "Detailed operation analytics"
-         }
-
-         if level is None:
-             # Default level
-             level = "standard"
-
-         if level not in privacy_levels:
-             raise ValueError(f"Invalid privacy level: {level}. Choose from {', '.join(privacy_levels.keys())}")
-
-         # Store preference
-         self.privacy_level = level
-
-         # Store in configuration file
-         with open(self.privacy_config_path, "w") as f:
-             json.dump({"level": level}, f)
-
-         return level
-
-     def get_privacy_level(self):
-         """Get current privacy level"""
-         return self.privacy_level
-
-     def _start_background_collector(self):
-         """Start background metrics collection"""
-         def collect_metrics():
-             while True:
-                 try:
-                     import psutil
-
-                     # Update peak memory
-                     current_process = psutil.Process()
-                     memory_info = current_process.memory_info()
-                     memory_usage = memory_info.rss / (1024 * 1024)  # MB
-
-                     if memory_usage > self.performance_metrics["peak_memory"]:
-                         self.performance_metrics["peak_memory"] = memory_usage
-
-                     # Save metrics periodically
-                     self._save_metrics()
-
-                 except:
-                     pass
-
-                 time.sleep(300)  # Run every 5 minutes
-
-         # Start thread
-         thread = threading.Thread(target=collect_metrics, daemon=True)
-         thread.start()
-
-     def record(self, event_type, data=None):
-         """Record an event"""
-         # Check privacy level
-         if self.privacy_level == "minimal" and event_type != "errors":
-             # In minimal mode, only track errors
-             return True
-
-         # Update counts
-         if event_type in self.metrics:
-             self.metrics[event_type] += 1
-         else:
-             self.metrics[event_type] = 1
-
-         # Special handling for errors
-         if event_type == "errors":
-             self.performance_metrics["last_error_time"] = datetime.now().isoformat()
-
-         # Special handling for function calls with timing data
-         if event_type == "function_calls" and data and "execution_time" in data:
-             # Keep up to 100 latest execution times
-             self.performance_metrics["execution_times"].append(data["execution_time"])
-             if len(self.performance_metrics["execution_times"]) > 100:
-                 self.performance_metrics["execution_times"] = self.performance_metrics["execution_times"][-100:]
-
-         # Save if metrics change significantly
-         if self.metrics["function_calls"] % 100 == 0 or event_type == "errors":
-             self._save_metrics()
-
-         return True
-
-     def get_metrics(self):
-         """Get current metrics"""
-         # Calculate derived metrics
-         avg_execution_time = 0
-         if self.performance_metrics["execution_times"]:
-             avg_execution_time = sum(self.performance_metrics["execution_times"]) / len(self.performance_metrics["execution_times"])
-
-         # Add derived metrics to output
-         output = self.metrics.copy()
-         output.update({
-             "avg_execution_time": avg_execution_time,
-             "peak_memory_mb": self.performance_metrics["peak_memory"],
-             "uptime": (datetime.now() - datetime.fromisoformat(self.metrics["startup_time"])).total_seconds(),
-             "privacy_level": self.privacy_level
-         })
-
-         return output
-
-     def reset(self):
-         """Reset metrics"""
-         self.metrics = {
-             "startup_time": datetime.now().isoformat(),
-             "function_calls": 0,
-             "api_requests": 0,
-             "errors": 0,
-             "warnings": 0
-         }
-
-         self.performance_metrics = {
-             "execution_times": [],
-             "last_error_time": None,
-             "peak_memory": 0
-         }
-
-         self._save_metrics()
-         return True
-
- # Create singleton instance
- tracker = Tracker()
-
-
- def record(event_type, data=None):
-     """Record an event"""
-     return tracker.record(event_type, data)
+     _instance=_B;_lock=threading.Lock()
+     def __new__(cls):
+         with cls._lock:
+             if cls._instance is _B:cls._instance=super(Tracker,cls).__new__(cls);cls._instance._initialize()
+             return cls._instance
+     def _initialize(self):self.metrics={_H:datetime.now().isoformat(),_F:0,_J:0,_D:0,_K:0};self.performance_metrics={_A:[],_I:_B,_E:0};self.privacy_level=_G;self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_C);self.data_dir=self.project_dir/'data';self.data_dir.mkdir(exist_ok=_C);self.metrics_path=self.data_dir/'usage_metrics.json';self.privacy_config_path=self.project_dir/'config'/'privacy.json';os.makedirs(os.path.dirname(self.privacy_config_path),exist_ok=_C);self._load_metrics();self._load_privacy_settings();self._start_background_collector()
+     def _load_metrics(self):
+         if self.metrics_path.exists():
+             try:
+                 with open(self.metrics_path,'r')as f:stored_metrics=json.load(f)
+                 for(key,value)in stored_metrics.items():
+                     if key in self.metrics:self.metrics[key]=value
+             except:pass
+     def _save_metrics(self):
+         try:
+             with open(self.metrics_path,'w')as f:json.dump(self.metrics,f)
+         except:pass
+     def _load_privacy_settings(self):
+         if self.privacy_config_path.exists():
+             try:
+                 with open(self.privacy_config_path,'r')as f:settings=json.load(f);self.privacy_level=settings.get('level',_G)
+             except:pass
+     def setup_privacy(self,level=_B):
+         privacy_levels={_L:'Essential system data only',_G:'Performance metrics and errors','enhanced':'Detailed operation analytics'}
+         if level is _B:level=_G
+         if level not in privacy_levels:raise ValueError(f"Invalid privacy level: {level}. Choose from {', '.join(privacy_levels.keys())}")
+         self.privacy_level=level
+         with open(self.privacy_config_path,'w')as f:json.dump({'level':level},f)
+         return level
+     def get_privacy_level(self):return self.privacy_level
+     def _start_background_collector(self):
+         def collect_metrics():
+             while _C:
+                 try:
+                     import psutil;current_process=psutil.Process();memory_info=current_process.memory_info();memory_usage=memory_info.rss/1048576
+                     if memory_usage>self.performance_metrics[_E]:self.performance_metrics[_E]=memory_usage
+                     self._save_metrics()
+                 except:pass
+                 time.sleep(300)
+         thread=threading.Thread(target=collect_metrics,daemon=_C);thread.start()
+     def record(self,event_type,data=_B):
+         A='execution_time'
+         if self.privacy_level==_L and event_type!=_D:return _C
+         if event_type in self.metrics:self.metrics[event_type]+=1
+         else:self.metrics[event_type]=1
+         if event_type==_D:self.performance_metrics[_I]=datetime.now().isoformat()
+         if event_type==_F and data and A in data:
+             self.performance_metrics[_A].append(data[A])
+             if len(self.performance_metrics[_A])>100:self.performance_metrics[_A]=self.performance_metrics[_A][-100:]
+         if self.metrics[_F]%100==0 or event_type==_D:self._save_metrics()
+         return _C
+     def get_metrics(self):
+         avg_execution_time=0
+         if self.performance_metrics[_A]:avg_execution_time=sum(self.performance_metrics[_A])/len(self.performance_metrics[_A])
+         output=self.metrics.copy();output.update({'avg_execution_time':avg_execution_time,'peak_memory_mb':self.performance_metrics[_E],'uptime':(datetime.now()-datetime.fromisoformat(self.metrics[_H])).total_seconds(),'privacy_level':self.privacy_level});return output
+     def reset(self):self.metrics={_H:datetime.now().isoformat(),_F:0,_J:0,_D:0,_K:0};self.performance_metrics={_A:[],_I:_B,_E:0};self._save_metrics();return _C
+ tracker=Tracker()
+ def record(event_type,data=_B):return tracker.record(event_type,data)
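Functionally, the 2.0.9 file is a minified rewrite of the same tracker: repeated literals become module-level aliases (_A through _L), method bodies are collapsed onto single lines, and the public surface (the Tracker singleton, tracker, setup_privacy, get_privacy_level, get_metrics, reset, and the module-level record) is unchanged. Below is a minimal sketch of how that surface could be exercised, assuming vnai 2.0.9 is installed; it is illustrative only and not taken from the package's documentation.

# Sketch only: exercises the API of vnai/scope/state.py as it appears in the
# diff above; assumes vnai 2.0.9 is installed in the current environment.
from vnai.scope.state import tracker, record

# Privacy level gates collection: in "minimal" mode only errors are recorded,
# per the check at the top of Tracker.record() in both versions.
tracker.setup_privacy("standard")

# The module-level record() delegates to the singleton's record().
record("function_calls", {"execution_time": 0.042})
record("errors")

# get_metrics() returns the raw counters plus derived fields
# (avg_execution_time, peak_memory_mb, uptime, privacy_level).
print(tracker.get_metrics())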
vnai-2.0.7.dist-info/METADATA → vnai-2.0.9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: vnai
- Version: 2.0.7
+ Version: 2.0.9
  Summary: System optimization and resource management toolkit
  Author-email: Vnstock HQ <support@vnstocks.com>
  License: MIT
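The only substantive metadata change is the version bump from 2.0.7 to 2.0.9; name, summary, author, and license are untouched. To confirm which build is installed in a given environment, a standard-library lookup like the following is enough (a generic importlib.metadata query, not something vnai itself provides):

# Generic check of the installed distribution version; not part of vnai.
from importlib.metadata import PackageNotFoundError, version

try:
    print("vnai", version("vnai"))  # "2.0.9" once the new wheel is installed
except PackageNotFoundError:
    print("vnai is not installed in this environment")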
vnai-2.0.7.dist-info/RECORD → vnai-2.0.9.dist-info/RECORD CHANGED
@@ -0,0 +1,16 @@
+ vnai/__init__.py,sha256=CMpislNEQ5QTJkbY4pcI2or65l7kYzWdgDs7_RifvGA,5277
+ vnai/beam/__init__.py,sha256=MG_4FkhQZyuKeaoQQh-KaULhxcGu370lGR6FVV5blU4,129
+ vnai/beam/metrics.py,sha256=OoLhL4KWzuCo-bi-4hS62lqE5g75UPS-h_wkraJUAdg,3730
+ vnai/beam/pulse.py,sha256=jatge9Di-g_o1TjM3yuq8cWtOROs2GHUlLkExDhSM90,1885
+ vnai/beam/quota.py,sha256=Ix5PoRMr2-2Y-y5QMn_TmE3ECjb_JVyNYyoQ5qGttHE,10233
+ vnai/flow/__init__.py,sha256=BURTo8cXicmqqTbeB0qfXwVole0oGDVp_UxRSeh4qfA,80
+ vnai/flow/queue.py,sha256=2Eb1kyC7QhRp-uDdya1QMUWvtD21cv5vncqHzTvAvMo,2246
+ vnai/flow/relay.py,sha256=UNFxt2W79XNkgHeFCxkeYNVym-x-8GuI0adSDGUbdyY,8952
+ vnai/scope/__init__.py,sha256=o7N7JjgSqIfQeDojgnxzV9gthEWL3YxxljnvRO9AXkQ,196
+ vnai/scope/profile.py,sha256=BHgX2yUQOMoJeUp_AaUWuc635bT1AJfw-FShNVRPikw,14985
+ vnai/scope/promo.py,sha256=Ts8z04npfMl99F-xiSB6_kYBiYpK0EYO5AjX7-CGmJI,12893
+ vnai/scope/state.py,sha256=IoVCPrDmZX822tS9Pk_JQvXpKcN8WogA1yZGa1d476Q,3841
+ vnai-2.0.9.dist-info/METADATA,sha256=xjW_MGzoS3b1cqzpF6zx14N2sEGcf_KvsYe4ao6EfFQ,666
+ vnai-2.0.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ vnai-2.0.9.dist-info/top_level.txt,sha256=4zI0qZHePCwvgSqXl4420sBcd0VzZn4MEcRsAIFae3k,5
+ vnai-2.0.9.dist-info/RECORD,,
@@ -1,16 +0,0 @@
- vnai/__init__.py,sha256=t27DlC7AGj-c4MrunqFne0bohTHE4NfZQL_nEsvWmlc,9106
- vnai/beam/__init__.py,sha256=xKb_iu9aAPXCulI7dENrvqVIhelSD1mIqKE9Go3GAHw,200
- vnai/beam/metrics.py,sha256=Yjht8nMLxm0JaRSVcHUwHyPkfWReIzgD5uuaXAFNjlE,7472
- vnai/beam/pulse.py,sha256=jp1YwjLaMhne2nYhM5PofveDsdrSp2YtewQ2jjE78Is,3470
- vnai/beam/quota.py,sha256=Ob_IoVpDKL6IdxxivkU1Z5x2nvIf-X1DQeQXdCiRUiU,21424
- vnai/flow/__init__.py,sha256=K3OeabzAWGrdPgTAOlDqrJh2y9aQW2pgLZg8tblN3ho,147
- vnai/flow/queue.py,sha256=b9YKUbiXDZRC3fVgEnA77EO0EMXAi8eCoBkHnAUI5Sc,4162
- vnai/flow/relay.py,sha256=XA4dognPrZ7IQbrgckeEjFw80IgBoK7i8LRmd1A4vR8,17058
- vnai/scope/__init__.py,sha256=overJZ_UiEfBRNcSieE1GPU_9X3oS4C5l6JeBaFFVxk,267
- vnai/scope/profile.py,sha256=6LL7Djke9F1HVA9eEExud2jZ5yGUfy9_NYt68nIj2-8,30737
- vnai/scope/promo.py,sha256=eiLYSo5UTKt4W1tz4NuMFWKHIxEIpq4yrueFC_Y8keQ,19054
- vnai/scope/state.py,sha256=LlcZNKBy2mcAnD765BO2Tlv3Zzbak2TOEz4RUPMCFZ8,7490
- vnai-2.0.7.dist-info/METADATA,sha256=zuMT82A8fd1MGEE__nwuGIeN_XWDzbMMEj8EEy4oGTM,666
- vnai-2.0.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- vnai-2.0.7.dist-info/top_level.txt,sha256=4zI0qZHePCwvgSqXl4420sBcd0VzZn4MEcRsAIFae3k,5
- vnai-2.0.7.dist-info/RECORD,,
File without changes
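Each RECORD row has the form path,sha256=<digest>,<size-in-bytes>, where the digest is the urlsafe base64 encoding of the file's SHA-256 hash with the trailing = padding stripped, as specified for wheel RECORD files. The drop in state.py's size (7490 to 3841 bytes) matches the minification shown above. A short sketch for re-deriving a row from a local file; the path argument is illustrative and should point at the installed module you want to check:

# Recompute a RECORD-style entry (path,sha256=<urlsafe-b64 digest>,size) for a
# local file, e.g. the installed vnai/scope/state.py.
import base64
import hashlib
from pathlib import Path

def record_entry(path: Path) -> str:
    data = path.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path.as_posix()},sha256={digest.decode()},{len(data)}"

print(record_entry(Path("vnai/scope/state.py")))  # compare with the RECORD rows above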