vnai 0.1.4__py3-none-any.whl → 2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +79 -319
- vnai/beam/__init__.py +3 -0
- vnai/beam/metrics.py +59 -0
- vnai/beam/pulse.py +31 -0
- vnai/beam/quota.py +37 -0
- vnai/flow/__init__.py +2 -0
- vnai/flow/queue.py +58 -0
- vnai/flow/relay.py +152 -0
- vnai/scope/__init__.py +4 -0
- vnai/scope/profile.py +223 -0
- vnai/scope/promo.py +56 -0
- vnai/scope/state.py +74 -0
- vnai-2.0.dist-info/METADATA +30 -0
- vnai-2.0.dist-info/RECORD +16 -0
- {vnai-0.1.4.dist-info → vnai-2.0.dist-info}/WHEEL +1 -1
- vnai-0.1.4.dist-info/METADATA +0 -19
- vnai-0.1.4.dist-info/RECORD +0 -5
- {vnai-0.1.4.dist-info → vnai-2.0.dist-info}/top_level.txt +0 -0
vnai/__init__.py
CHANGED
@@ -1,319 +1,79 @@
[old lines 1–79: content not shown in the source diff]
-            elif 'GITPOD_WORKSPACE_CLUSTER_HOST' in os.environ:
-                hosting_service = "Gitpod"
-            elif 'REPLIT_USER' in os.environ:
-                hosting_service = "Replit"
-            elif 'KAGGLE_CONTAINER_NAME' in os.environ:
-                hosting_service = "Kaggle"
-            elif '.hf.space' in os.environ['SPACE_HOST']:
-                hosting_service = "Hugging Face Spaces"
-        except:
-            hosting_service = "Local or Unknown"
-
-        # System information
-        os_info = platform.uname()
-
-        # CPU information
-        cpu_arch = platform.processor()
-        cpu_logical_cores = psutil.cpu_count(logical=True)
-        cpu_cores = psutil.cpu_count(logical=False)
-
-        # Memory information
-        ram_total = psutil.virtual_memory().total / (1024**3)  # GB
-        ram_available = psutil.virtual_memory().available / (1024**3)  # GB
-
-        try:
-            hostname = socket.gethostname()
-            IPAddr = socket.gethostbyname(hostname)
-        except socket.gaierror:
-            hostname = "unknown"
-            IPAddr = "127.0.0.1"  # Fallback to localhost
-
-        mac = ':'.join(['{:02x}'.format((uuid.getnode() >> elements) & 0xff) for elements in range(0, 2 * 6, 2)])
-
-        # Combine information into a dictionary
-        info = {
-            "uuid": machine_id,
-            "environment": environment,
-            "hosting_service": hosting_service,
-            "python_version": platform.python_version(),
-            "os_name": os_info.system,
-            "os_version": os_info.version,
-            "machine": os_info.machine,
-            "cpu_model": cpu_arch,
-            "cpu_cores": cpu_cores,
-            "cpu_logical_cores": cpu_logical_cores,
-            "ram_total": round(ram_total, 1),
-            "ram_available": round(ram_available, 1),
-            "local_ip": IPAddr,
-            "mac_address": mac,
-        }
-
-        return info
-
-    def show_terms_and_conditions(self):
-        """
-        Displays terms and conditions and asks for acceptance.
-        """
-        print(self.terms_and_conditions)
-
-        # check if os.environ[TC_VAR] exist and equal to tôi đồng ý
-        if TC_VAR in os.environ and os.environ[TC_VAR] == TC_VAL:
-            response = TC_VAL
-        else:
-            response = TC_VAL
-            os.environ[TC_VAR] = TC_VAL
-
-        from datetime import datetime
-        # get now time in string
-        now = datetime.now()
-        HARDWARE = self.system_info()
-        # VERSION = pkg_resources.get_distribution('vnstock').version
-
-        VERSION = None
-        try:
-            VERSION = importlib.metadata.version('vnstock')
-        except importlib.metadata.PackageNotFoundError:
-            # print("Package 'vnstock' not found")
-            pass
-
-        # parse HARDWARE to string to store in the file
-        signed_aggreement = f"MÔ TẢ:\nNgười dùng có mã nhận dạng {HARDWARE['uuid']} đã chấp nhận điều khoản & điều kiện sử dụng Vnstock lúc {now}\n---\n\nTHÔNG TIN THIẾT BỊ: {str(HARDWARE)}\n\nĐính kèm bản sao nội dung bạn đã đọc, hiểu rõ và đồng ý dưới đây:\n{self.terms_and_conditions}"
-
-        # Store the acceptance
-        with open(self.terms_file_path, "w", encoding="utf-8") as f:
-            f.write(signed_aggreement)
-        return True
-
-    def log_analytics_data(self):
-        """
-        Sends analytics data to a webhook.
-        """
-        HARDWARE = self.system_info()
-        EP = 'gAAAAABmOPNX4DJAsImlkzvtcyezBxr4UcK_HpCOgz-GOF9yBDP99tWNFYM_ZjeC22kNqmX3urZa467BC1D2fPLJrUkp6rQizYEMK4m196ZlOzUhwCbfjdvURXesL3LC7DofOgwWjNyltPQ8AnPyB4YUMnnAwnFooQ=='
-        TGE = self.cph.decrypt(self.target).decode('utf-8')
-        WH = f"{self.cph.decrypt(((self.RH+EP+self.RH)[30:-30]).encode()).decode('utf-8')}{TGE}"
-
-        data = {
-            "systems": HARDWARE,
-            "accepted_agreement": True,
-            "installed_packages": self.packages_installed(),
-        }
-
-        # save data to a json file in id folder
-        with open(self.env_config, "w", encoding="utf-8") as f:
-            f.write(json.dumps(data, indent=4))
-
-        try:
-            response = requests.post(WH, json=data)
-        except:
-            raise SystemExit("Không thể gửi dữ liệu phân tích. Vui lòng kiểm tra kết nối mạng và thử lại sau.")
-
-    def check_terms_accepted(self):
-        """
-        Checks if terms and conditions are accepted.
-        """
-        if not self.env_config.exists() or not self.terms_file_path.exists():
-            # If not, ask for acceptance
-            accepted = self.show_terms_and_conditions()
-            if not accepted:
-                raise SystemExit("Điều khoản và điều kiện không được chấp nhận. Không thể tiếp tục.")
-            else:
-                self.log_analytics_data()
-
-    def packages_installed(self):
-        """
-        Checks installed packages and returns a dictionary.
-        """
-        # Define package mapping
-        package_mapping = {
-            "vnstock_family": [
-                "vnstock",
-                "vnstock3",
-                "vnstock_ezchart",
-                "vnstock_data_pro"
-                "vnstock_market_data_pipeline",
-                "vnstock_ta",
-                "vnii",
-                "vnai",
-            ],
-            "analytics": [
-                "openbb",
-                "pandas_ta"
-            ],
-            "static_charts": [
-                "matplotlib",
-                "seaborn",
-                "altair"
-            ],
-            "dashboard": [
-                "streamlit",
-                "voila",
-                "panel",
-                "shiny",
-                "dash",
-            ],
-            "interactive_charts": [
-                "mplfinance",
-                "plotly",
-                "plotline",
-                "bokeh",
-                "pyecharts",
-                "highcharts-core",
-                "highcharts-stock",
-                "mplchart",
-            ],
-            "datafeed": [
-                "yfinance",
-                "alpha_vantage",
-                "pandas-datareader",
-                "investpy",
-            ],
-            "official_api": [
-                "ssi-fc-data",
-                "ssi-fctrading"
-            ],
-            "risk_return": [
-                "pyfolio",
-                "empyrical",
-                "quantstats",
-                "financetoolkit",
-            ],
-            "machine_learning": [
-                "scipy",
-                "sklearn",
-                "statsmodels",
-                "pytorch",
-                "tensorflow",
-                "keras",
-                "xgboost"
-            ],
-            "indicators": [
-                "stochastic",
-                "talib",
-                "tqdm",
-                "finta",
-                "financetoolkit",
-                "tulipindicators"
-            ],
-            "backtesting": [
-                "vectorbt",
-                "backtesting",
-                "bt",
-                "zipline",
-                "pyalgotrade",
-                "backtrader",
-                "pybacktest",
-                "fastquant",
-                "lean",
-                "ta",
-                "finmarketpy",
-                "qstrader",
-            ],
-            "server": [
-                "fastapi",
-                "flask",
-                "uvicorn",
-                "gunicorn"
-            ],
-            "framework": [
-                "lightgbm",
-                "catboost",
-                "django",
-            ]
-        }
-
-        installed_packages = {}
-
-        for category, packages in package_mapping.items():
-            installed_packages[category] = []
-            for pkg in packages:
-                try:
-                    version = importlib.metadata.version(pkg)
-                    installed_packages[category].append((pkg, version))
-                except importlib.metadata.PackageNotFoundError:
-                    pass
-
-        return installed_packages
-
-def tc_init():
-    vnstock_initializer = VnstockInitializer(TG)
-    vnstock_initializer.check_terms_accepted()
+_K='standard'
+_J='accepted_agreement'
+_I='environment.json'
+_H='terms_agreement.txt'
+_G='timestamp'
+_F=False
+_E='id'
+_D='.vnstock'
+_C='machine_id'
+_B=None
+_A=True
+import os,pathlib,json,time,threading,functools
+from datetime import datetime
+from vnai.beam.quota import guardian,optimize
+from vnai.beam.metrics import collector,capture
+from vnai.beam.pulse import monitor
+from vnai.flow.relay import conduit,configure
+from vnai.flow.queue import buffer
+from vnai.scope.profile import inspector
+from vnai.scope.state import tracker,record
+from vnai.scope.promo import present
+TC_VAR='ACCEPT_TC'
+TC_VAL='tôi đồng ý'
+TC_PATH=pathlib.Path.home()/_D/_E/_H
+TERMS_AND_CONDITIONS='\nKhi tiếp tục sử dụng Vnstock, bạn xác nhận rằng bạn đã đọc, hiểu và đồng ý với Chính sách quyền riêng tư và Điều khoản, điều kiện về giấy phép sử dụng Vnstock.\n\nChi tiết:\n- Giấy phép sử dụng phần mềm: https://vnstocks.com/docs/tai-lieu/giay-phep-su-dung\n- Chính sách quyền riêng tư: https://vnstocks.com/docs/tai-lieu/chinh-sach-quyen-rieng-tu\n'
+class Core:
+    def __init__(A):A.initialized=_F;A.webhook_url=_B;A.init_time=datetime.now().isoformat();A.home_dir=pathlib.Path.home();A.project_dir=A.home_dir/_D;A.id_dir=A.project_dir/_E;A.terms_file_path=TC_PATH;A.system_info=_B;A.project_dir.mkdir(exist_ok=_A);A.id_dir.mkdir(exist_ok=_A);A.initialize()
+    def initialize(A,webhook_url=_B):
+        C=webhook_url
+        if A.initialized:return _A
+        if not A._check_terms():A._accept_terms()
+        from vnai.scope.profile import inspector as B;B.setup_vnstock_environment();present()
+        if C:A.webhook_url=C;configure(C)
+        record('initialization',{_G:datetime.now().isoformat()});A.system_info=B.examine();conduit.queue({'type':'system_info','data':{'commercial':B.detect_commercial_usage(),'packages':B.scan_packages()}},priority='high');A.initialized=_A;return _A
+    def _check_terms(A):return os.path.exists(A.terms_file_path)
+    def _accept_terms(C):
+        A=inspector.examine()
+        if TC_VAR in os.environ and os.environ[TC_VAR]==TC_VAL:E=TC_VAL
+        else:E=TC_VAL;os.environ[TC_VAR]=TC_VAL
+        D=datetime.now();F=f"""Người dùng có mã nhận dạng {A[_C]} đã chấp nhận điều khoản & điều kiện sử dụng Vnstock lúc {D}
+---
+
+THÔNG TIN THIẾT BỊ: {json.dumps(A,indent=2)}
+
+Đính kèm bản sao nội dung bạn đã đọc, hiểu rõ và đồng ý dưới đây:
+{TERMS_AND_CONDITIONS}"""
+        with open(C.terms_file_path,'w',encoding='utf-8')as B:B.write(F)
+        G=C.id_dir/_I;H={_J:_A,_G:D.isoformat(),_C:A[_C]}
+        with open(G,'w')as B:json.dump(H,B)
+        return _A
+    def status(A):return{'initialized':A.initialized,'health':monitor.report(),'metrics':tracker.get_metrics()}
+    def configure_privacy(B,level=_K):from vnai.scope.state import tracker as A;return A.setup_privacy(level)
+core=Core()
+def tc_init(webhook_url=_B):return core.initialize(webhook_url)
+def setup(webhook_url=_B):return core.initialize(webhook_url)
+def optimize_execution(resource_type='default'):return optimize(resource_type)
+def measure_performance(module_type='function'):return capture(module_type)
+def accept_license_terms(terms_text=_B):
+    A=terms_text
+    if A is _B:A=TERMS_AND_CONDITIONS
+    D=inspector.examine();C=pathlib.Path.home()/_D/_E/_H;os.makedirs(os.path.dirname(C),exist_ok=_A)
+    with open(C,'w',encoding='utf-8')as B:B.write(f"Terms accepted at {datetime.now().isoformat()}\n");B.write(f"System: {json.dumps(D)}\n\n");B.write(A)
+    return _A
+def accept_vnstock_terms():
+    from vnai.scope.profile import inspector as C;D=C.examine();E=pathlib.Path.home();A=E/_D;A.mkdir(exist_ok=_A);B=A/_E;B.mkdir(exist_ok=_A);F=B/_I;G={_J:_A,_G:datetime.now().isoformat(),_C:D[_C]}
+    try:
+        with open(F,'w')as H:json.dump(G,H)
+        print('Vnstock terms accepted successfully.');return _A
+    except Exception as I:print(f"Error accepting terms: {I}");return _F
+def setup_for_colab():from vnai.scope.profile import inspector as A;A.detect_colab_with_delayed_auth(immediate=_A);A.setup_vnstock_environment();return'Environment set up for Google Colab'
+def display_content():return present()
+def configure_privacy(level=_K):from vnai.scope.state import tracker as A;return A.setup_privacy(level)
+def check_commercial_usage():from vnai.scope.profile import inspector as A;return A.detect_commercial_usage()
+def authenticate_for_persistence():from vnai.scope.profile import inspector as A;return A.get_or_create_user_id()
+def configure_webhook(webhook_id='80b8832b694a75c8ddc811ac7882a3de'):
+    A=webhook_id
+    if not A:return _F
+    from vnai.flow.relay import configure as B;C=f"https://botbuilder.larksuite.com/api/trigger-webhook/{A}";return B(C)
+configure_webhook()
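Note: the rewritten vnai/__init__.py replaces the old explicit consent flow with an always-on Core singleton; core=Core() and configure_webhook() both run as side effects of "import vnai". A minimal usage sketch follows, assuming only the signatures visible in the hunk above:

    # Sketch only; each call below maps to a function defined in the new module.
    import vnai  # importing already runs Core() and configure_webhook()

    vnai.setup()                       # alias for core.initialize(webhook_url=None)
    vnai.configure_privacy('minimal')  # persisted by vnai.scope.state.tracker

    @vnai.optimize_execution('TCBS')   # quota gate from vnai/beam/quota.py
    @vnai.measure_performance()        # timing capture from vnai/beam/metrics.py
    def fetch_quote(symbol):
        ...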
vnai/beam/__init__.py
ADDED
vnai/beam/metrics.py
ADDED
@@ -0,0 +1,59 @@
+_K='success'
+_J='buffer_size'
+_I='request'
+_H='rate_limit'
+_G='execution_time'
+_F='timestamp'
+_E=False
+_D=True
+_C='error'
+_B=None
+_A='function'
+import sys,time,threading
+from datetime import datetime
+class Collector:
+    _instance=_B;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is _B:A._instance=super(Collector,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.metrics={_A:[],_H:[],_I:[],_C:[]};A.thresholds={_J:50,'error_threshold':.1,'performance_threshold':5.};A.function_count=0;A.colab_auth_triggered=_E
+    def record(A,metric_type,data,priority=_B):
+        D='system';C=metric_type;B=data
+        if not isinstance(B,dict):B={'value':str(B)}
+        if _F not in B:B[_F]=datetime.now().isoformat()
+        if C!='system_info'and isinstance(B,dict):
+            if D in B:del B[D]
+            from vnai.scope.profile import inspector as E;B['machine_id']=E.fingerprint()
+        if C in A.metrics:A.metrics[C].append(B)
+        else:A.metrics[_A].append(B)
+        if C==_A:
+            A.function_count+=1
+            if A.function_count>10 and not A.colab_auth_triggered and'google.colab'in sys.modules:A.colab_auth_triggered=_D;threading.Thread(target=A._trigger_colab_auth,daemon=_D).start()
+        if sum(len(A)for A in A.metrics.values())>=A.thresholds[_J]:A._send_metrics()
+        if priority=='high'or C==_C:A._send_metrics()
+    def _trigger_colab_auth(B):
+        try:from vnai.scope.profile import inspector as A;A.get_or_create_user_id()
+        except:pass
+    def _send_metrics(F):
+        E='vnai';D='source';C='unknown';from vnai.flow.relay import track_function_call as H,track_rate_limit as I,track_api_request as J
+        for(B,G)in F.metrics.items():
+            if not G:continue
+            for A in G:
+                try:
+                    if B==_A:H(function_name=A.get(_A,C),source=A.get(D,E),execution_time=A.get(_G,0),success=A.get(_K,_D),error=A.get(_C),args=A.get('args'))
+                    elif B==_H:I(source=A.get(D,E),limit_type=A.get('limit_type',C),limit_value=A.get('limit_value',0),current_usage=A.get('current_usage',0),is_exceeded=A.get('is_exceeded',_E))
+                    elif B==_I:J(endpoint=A.get('endpoint',C),source=A.get(D,E),method=A.get('method','GET'),status_code=A.get('status_code',200),execution_time=A.get(_G,0),request_size=A.get('request_size',0),response_size=A.get('response_size',0))
+                except Exception as K:continue
+            F.metrics[B]=[]
+    def get_metrics_summary(A):return{A:len(B)for(A,B)in A.metrics.items()}
+collector=Collector()
+def capture(module_type=_A):
+    def A(func):
+        def A(*A,**D):
+            E=time.time();B=_E;C=_B
+            try:F=func(*A,**D);B=_D;return F
+            except Exception as G:C=str(G);raise
+            finally:H=time.time()-E;collector.record(module_type,{_A:func.__name__,_G:H,_K:B,_C:C,_F:datetime.now().isoformat(),'args':str(A)[:100]if A else _B})
+        return A
+    return A
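The capture() factory at the end of the file is the decorator other packages are expected to apply; every call is timed, stamped with the machine fingerprint by record(), and buffered until 50 records (or any error) trigger a send. A hedged illustration, not code from the package:

    from vnai.beam.metrics import capture, collector

    @capture('function')
    def load_prices(symbol):
        return [1.0, 2.0, 3.0]

    load_prices('VNM')  # records name, execution_time, success/error, str(args)[:100]
    print(collector.get_metrics_summary())
    # counts per metric type, e.g. {'function': 1, 'rate_limit': 0, 'request': 0, 'error': 0}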
vnai/beam/pulse.py
ADDED
@@ -0,0 +1,31 @@
+_B='status'
+_A='healthy'
+import threading,time
+from datetime import datetime
+class Monitor:
+    _instance=None;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is None:A._instance=super(Monitor,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.health_status=_A;A.last_check=time.time();A.check_interval=300;A.error_count=0;A.warning_count=0;A.status_history=[];A._start_background_check()
+    def _start_background_check(A):
+        def B():
+            while True:
+                try:A.check_health()
+                except:pass
+                time.sleep(A.check_interval)
+        C=threading.Thread(target=B,daemon=True);C.start()
+    def check_health(A):
+        from vnai.beam.metrics import collector as F;from vnai.beam.quota import guardian as G;A.last_check=time.time();B=F.get_metrics_summary();C=B.get('error',0)>0;D=G.usage();E=D>80
+        if C and E:A.health_status='critical';A.error_count+=1
+        elif C or E:A.health_status='warning';A.warning_count+=1
+        else:A.health_status=_A
+        A.status_history.append({'timestamp':datetime.now().isoformat(),_B:A.health_status,'metrics':B,'resource_usage':D})
+        if len(A.status_history)>10:A.status_history=A.status_history[-10:]
+        return A.health_status
+    def report(A):
+        if time.time()-A.last_check>A.check_interval:A.check_health()
+        return{_B:A.health_status,'last_check':datetime.fromtimestamp(A.last_check).isoformat(),'error_count':A.error_count,'warning_count':A.warning_count,'history':A.status_history[-3:]}
+    def reset(A):A.health_status=_A;A.error_count=0;A.warning_count=0;A.status_history=[];A.last_check=time.time()
+monitor=Monitor()
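monitor is a daemon-thread singleton that re-evaluates health every 300 seconds, combining the metrics error count with guardian.usage(). A short reading sketch based on the report() method above:

    from vnai.beam.pulse import monitor

    report = monitor.report()  # re-checks first if the last check is older than 300 s
    print(report['status'])    # 'healthy', 'warning', or 'critical'
    print(report['history'])   # at most the last three status snapshots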
vnai/beam/quota.py
ADDED
@@ -0,0 +1,37 @@
+_E=False
+_D='resource_type'
+_C='default'
+_B='hour'
+_A='min'
+import time,threading,functools
+from collections import defaultdict
+from datetime import datetime
+class Guardian:
+    _instance=None;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is None:A._instance=super(Guardian,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.resource_limits=defaultdict(lambda:defaultdict(int));A.usage_counters=defaultdict(lambda:defaultdict(list));A.resource_limits[_C]={_A:60,_B:2000};A.resource_limits['TCBS']={_A:60,_B:2000};A.resource_limits['VCI']={_A:60,_B:2000}
+    def verify(B,operation_id,resource_type=_C):
+        M='is_exceeded';L='current_usage';K='limit_value';J='limit_type';I='rate_limit';A=resource_type;E=time.time();C=B.resource_limits.get(A,B.resource_limits[_C]);N=E-60;B.usage_counters[A][_A]=[A for A in B.usage_counters[A][_A]if A>N];F=len(B.usage_counters[A][_A]);O=F>=C[_A]
+        if O:from vnai.beam.metrics import collector as G;G.record(I,{_D:A,J:_A,K:C[_A],L:F,M:True});return _E
+        P=E-3600;B.usage_counters[A][_B]=[A for A in B.usage_counters[A][_B]if A>P];H=len(B.usage_counters[A][_B]);D=H>=C[_B];from vnai.beam.metrics import collector as G;G.record(I,{_D:A,J:_B if D else _A,K:C[_B]if D else C[_A],L:H if D else F,M:D})
+        if D:return _E
+        B.usage_counters[A][_A].append(E);B.usage_counters[A][_B].append(E);return True
+    def usage(A,resource_type=_C):B=resource_type;D=time.time();C=A.resource_limits.get(B,A.resource_limits[_C]);E=D-60;F=D-3600;A.usage_counters[B][_A]=[A for A in A.usage_counters[B][_A]if A>E];A.usage_counters[B][_B]=[A for A in A.usage_counters[B][_B]if A>F];G=len(A.usage_counters[B][_A]);H=len(A.usage_counters[B][_B]);I=G/C[_A]*100 if C[_A]>0 else 0;J=H/C[_B]*100 if C[_B]>0 else 0;return max(I,J)
+guardian=Guardian()
+def optimize(resource_type):
+    B=resource_type
+    def A(func):
+        A=func
+        @functools.wraps(A)
+        def C(*F,**G):
+            E='function'
+            if not guardian.verify(B):raise RuntimeError(f"Resource constraints detected. Please try again later.")
+            H=time.time();C=_E;D=None
+            try:I=A(*F,**G);C=True;return I
+            except Exception as J:D=str(J);raise
+            finally:K=time.time()-H;from vnai.beam.metrics import collector as L;L.record(E,{E:A.__name__,_D:B,'execution_time':K,'success':C,'error':D,'timestamp':datetime.now().isoformat()})
+        return C
+    return A
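Despite the optimize() name, Guardian.verify() is a plain sliding-window rate limiter: it keeps raw call timestamps per resource, prunes entries older than the window, and refuses once the count reaches the limit (60/minute and 2000/hour by default, with identical 'TCBS' and 'VCI' entries). A standalone sketch of the same technique, not code from the package:

    import time

    class SlidingWindowLimiter:
        """Minimal re-implementation of the window logic inside Guardian.verify()."""

        def __init__(self, limit=60, window=60.0):
            self.limit, self.window = limit, window
            self.calls = []  # raw timestamps, pruned on every check

        def allow(self):
            now = time.time()
            self.calls = [t for t in self.calls if t > now - self.window]
            if len(self.calls) >= self.limit:
                return False  # Guardian additionally records a rate_limit metric here
            self.calls.append(now)
            return True

    limiter = SlidingWindowLimiter()                # mirrors the {'min': 60} default
    assert all(limiter.allow() for _ in range(60))
    assert limiter.allow() is False                 # 61st call inside the window is refused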
vnai/flow/__init__.py
ADDED
vnai/flow/queue.py
ADDED
@@ -0,0 +1,58 @@
+_C='category'
+_B=True
+_A=None
+import time,threading,json
+from datetime import datetime
+from pathlib import Path
+class Buffer:
+    _instance=_A;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is _A:A._instance=super(Buffer,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.data=[];A.lock=threading.Lock();A.max_size=1000;A.backup_interval=300;A.home_dir=Path.home();A.project_dir=A.home_dir/'.vnstock';A.project_dir.mkdir(exist_ok=_B);A.data_dir=A.project_dir/'data';A.data_dir.mkdir(exist_ok=_B);A.backup_path=A.data_dir/'buffer_backup.json';A._load_from_backup();A._start_backup_thread()
+    def _load_from_backup(A):
+        if A.backup_path.exists():
+            try:
+                with open(A.backup_path,'r')as B:C=json.load(B)
+                with A.lock:A.data=C
+            except:pass
+    def _save_to_backup(A):
+        with A.lock:
+            if not A.data:return
+            try:
+                with open(A.backup_path,'w')as B:json.dump(A.data,B)
+            except:pass
+    def _start_backup_thread(A):
+        def B():
+            while _B:time.sleep(A.backup_interval);A._save_to_backup()
+        C=threading.Thread(target=B,daemon=_B);C.start()
+    def add(A,item,category=_A):
+        D='timestamp';C=category;B=item
+        with A.lock:
+            if isinstance(B,dict):
+                if D not in B:B[D]=datetime.now().isoformat()
+                if C:B[_C]=C
+            A.data.append(B)
+            if len(A.data)>A.max_size:A.data=A.data[-A.max_size:]
+        if len(A.data)%100==0:A._save_to_backup()
+        return len(A.data)
+    def get(A,count=_A,category=_A):
+        D=category;C=count
+        with A.lock:
+            if D:B=[A for A in A.data if A.get(_C)==D]
+            else:B=A.data.copy()
+            if C:return B[:C]
+            else:return B
+    def clear(A,category=_A):
+        B=category
+        with A.lock:
+            if B:A.data=[A for A in A.data if A.get(_C)!=B]
+            else:A.data=[]
+        A._save_to_backup();return len(A.data)
+    def size(A,category=_A):
+        B=category
+        with A.lock:
+            if B:return len([A for A in A.data if A.get(_C)==B])
+            else:return len(A.data)
+buffer=Buffer()
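Buffer is a capped list (max_size 1000) mirrored to ~/.vnstock/data/buffer_backup.json by a daemon thread every 300 seconds and on every 100th add(). A usage sketch based only on the methods above (indentation of the backup calls is reconstructed, so treat the exact flush points as an assumption):

    from vnai.flow.queue import buffer

    n = buffer.add({'event': 'demo'}, category='example')  # a timestamp is stamped in
    print(n, buffer.size(category='example'))              # total size, per-category count
    print(buffer.get(count=5, category='example'))         # first 5 matches in insertion order
    buffer.clear(category='example')                       # also rewrites the backup file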
vnai/flow/relay.py
ADDED
@@ -0,0 +1,152 @@
+_T='execution_time'
+_S='manual'
+_R='success'
+_Q='is_exceeded'
+_P='source'
+_O='function'
+_N='last_sync_time'
+_M='sync_interval'
+_L='buffer_size'
+_K='webhook_url'
+_J='value'
+_I='sync_count'
+_H='machine_id'
+_G=False
+_F=None
+_E='timestamp'
+_D='api_requests'
+_C='rate_limits'
+_B='function_calls'
+_A=True
+import time,threading,json,random,requests
+from datetime import datetime
+from pathlib import Path
+from typing import Dict,List,Any,Optional
+class Conduit:
+    _instance=_F;_lock=threading.Lock()
+    def __new__(A,webhook_url=_F,buffer_size=50,sync_interval=300):
+        with A._lock:
+            if A._instance is _F:A._instance=super(Conduit,A).__new__(A);A._instance._initialize(webhook_url,buffer_size,sync_interval)
+        return A._instance
+    def _initialize(A,webhook_url,buffer_size,sync_interval):
+        A.webhook_url=webhook_url;A.buffer_size=buffer_size;A.sync_interval=sync_interval;A.buffer={_B:[],_D:[],_C:[]};A.lock=threading.Lock();A.last_sync_time=time.time();A.sync_count=0;A.failed_queue=[];A.home_dir=Path.home();A.project_dir=A.home_dir/'.vnstock';A.project_dir.mkdir(exist_ok=_A);A.data_dir=A.project_dir/'data';A.data_dir.mkdir(exist_ok=_A);A.config_path=A.data_dir/'relay_config.json'
+        try:from vnai.scope.profile import inspector as B;A.machine_id=B.fingerprint()
+        except:A.machine_id=A._generate_fallback_id()
+        A._load_config();A._start_periodic_sync()
+    def _generate_fallback_id(D)->str:
+        try:import platform as A,hashlib as B,uuid;C=A.node()+A.platform()+A.processor();return B.md5(C.encode()).hexdigest()
+        except:import uuid;return str(uuid.uuid4())
+    def _load_config(B):
+        if B.config_path.exists():
+            try:
+                with open(B.config_path,'r')as C:A=json.load(C)
+                if not B.webhook_url and _K in A:B.webhook_url=A[_K]
+                if _L in A:B.buffer_size=A[_L]
+                if _M in A:B.sync_interval=A[_M]
+                if _N in A:B.last_sync_time=A[_N]
+                if _I in A:B.sync_count=A[_I]
+            except:pass
+    def _save_config(A):
+        B={_K:A.webhook_url,_L:A.buffer_size,_M:A.sync_interval,_N:A.last_sync_time,_I:A.sync_count}
+        try:
+            with open(A.config_path,'w')as C:json.dump(B,C)
+        except:pass
+    def _start_periodic_sync(A):
+        def B():
+            while _A:time.sleep(A.sync_interval);A.dispatch('periodic')
+        C=threading.Thread(target=B,daemon=_A);C.start()
+    def add_function_call(B,record):
+        A=record
+        if not isinstance(A,dict):A={_J:str(A)}
+        with B.lock:B.buffer[_B].append(A);B._check_triggers(_B)
+    def add_api_request(B,record):
+        A=record
+        if not isinstance(A,dict):A={_J:str(A)}
+        with B.lock:B.buffer[_D].append(A);B._check_triggers(_D)
+    def add_rate_limit(B,record):
+        A=record
+        if not isinstance(A,dict):A={_J:str(A)}
+        with B.lock:B.buffer[_C].append(A);B._check_triggers(_C)
+    def _check_triggers(A,record_type:str):
+        D=record_type;E=time.time();B=_G;C=_F;F=sum(len(A)for A in A.buffer.values())
+        if F>=A.buffer_size:B=_A;C='buffer_full'
+        elif D==_C and A.buffer[_C]and any(A.get(_Q)for A in A.buffer[_C]if isinstance(A,dict)):B=_A;C='rate_limit_exceeded'
+        elif D==_B and A.buffer[_B]and any(not A.get(_R)for A in A.buffer[_B]if isinstance(A,dict)):B=_A;C='function_error'
+        else:
+            G=min(1.,(E-A.last_sync_time)/(A.sync_interval/2))
+            if random.random()<.05*G:B=_A;C='random_time_weighted'
+        if B:threading.Thread(target=A.dispatch,args=(C,),daemon=_A).start()
+    def queue(B,package,priority=_F):
+        N='packages';M='commercial';L='system_info';K='rate_limit';I='system';H='type';C=package
+        if not C:return _G
+        if not isinstance(C,dict):B.add_function_call({'message':str(C)});return _A
+        if _E not in C:C[_E]=datetime.now().isoformat()
+        if H in C:
+            D=C[H];A=C.get('data',{})
+            if isinstance(A,dict)and I in A:
+                J=A[I].get(_H);A.pop(I)
+                if J:A[_H]=J
+            if D==_O:B.add_function_call(A)
+            elif D=='api_request':B.add_api_request(A)
+            elif D==K:B.add_rate_limit(A)
+            elif D==L:B.add_function_call({H:L,M:A.get(M),N:A.get(N),_E:C.get(_E)})
+            elif D=='metrics':
+                O=A
+                for(G,F)in O.items():
+                    if isinstance(F,list):
+                        if G==_O:
+                            for E in F:B.add_function_call(E)
+                        elif G==K:
+                            for E in F:B.add_rate_limit(E)
+                        elif G=='request':
+                            for E in F:B.add_api_request(E)
+            else:B.add_function_call(A)
+        else:B.add_function_call(C)
+        if priority=='high':B.dispatch('high_priority')
+        return _A
+    def dispatch(A,reason=_S):
+        if not A.webhook_url:return _G
+        with A.lock:
+            if all(len(A)==0 for A in A.buffer.values()):return _G
+            B={_B:A.buffer[_B].copy(),_D:A.buffer[_D].copy(),_C:A.buffer[_C].copy()};A.buffer={_B:[],_D:[],_C:[]};A.last_sync_time=time.time();A.sync_count+=1;A._save_config()
+        try:from vnai.scope.profile import inspector as G;C=G.examine();D=C.get(_H,A.machine_id)
+        except:C={_H:A.machine_id};D=A.machine_id
+        E={'analytics_data':B,'metadata':{_E:datetime.now().isoformat(),_H:D,_I:A.sync_count,'trigger_reason':reason,'environment':C,'data_counts':{_B:len(B[_B]),_D:len(B[_D]),_C:len(B[_C])}}};F=A._send_data(E)
+        if not F:
+            with A.lock:
+                A.failed_queue.append(E)
+                if len(A.failed_queue)>10:A.failed_queue=A.failed_queue[-10:]
+        return F
+    def _send_data(A,payload):
+        if not A.webhook_url:return _G
+        try:B=requests.post(A.webhook_url,json=payload,timeout=5);return B.status_code==200
+        except:return _G
+    def retry_failed(A):
+        if not A.failed_queue:return 0
+        with A.lock:D=A.failed_queue.copy();A.failed_queue=[]
+        B=0
+        for C in D:
+            if A._send_data(C):B+=1
+            else:
+                with A.lock:A.failed_queue.append(C)
+        return B
+    def configure(A,webhook_url):
+        with A.lock:A.webhook_url=webhook_url;A._save_config();return _A
+conduit=Conduit()
+def track_function_call(function_name,source,execution_time,success=_A,error=_F,args=_F):
+    E=error;A=args;C={_O:function_name,_P:source,_T:execution_time,_E:datetime.now().isoformat(),_R:success}
+    if E:C['error']=E
+    if A:
+        B={}
+        if isinstance(A,dict):
+            for(F,D)in A.items():
+                if isinstance(D,(str,int,float,bool)):B[F]=D
+                else:B[F]=str(type(D))
+        else:B={_J:str(A)}
+        C['args']=B
+    conduit.add_function_call(C)
+def track_rate_limit(source,limit_type,limit_value,current_usage,is_exceeded):B=current_usage;A=limit_value;C={_P:source,'limit_type':limit_type,'limit_value':A,'current_usage':B,_Q:is_exceeded,_E:datetime.now().isoformat(),'usage_percentage':B/A*100 if A>0 else 0};conduit.add_rate_limit(C)
+def track_api_request(endpoint,source,method,status_code,execution_time,request_size=0,response_size=0):A={'endpoint':endpoint,_P:source,'method':method,'status_code':status_code,_T:execution_time,_E:datetime.now().isoformat(),'request_size':request_size,'response_size':response_size};conduit.add_api_request(A)
+def configure(webhook_url):return conduit.configure(webhook_url)
+def sync_now():return conduit.dispatch(_S)
+def retry_failed():return conduit.retry_failed()
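For reference, this is the shape of the JSON that dispatch() POSTs to the configured webhook, reconstructed from the code above; the values here are placeholders, not captured data:

    payload = {
        'analytics_data': {
            'function_calls': [],   # entries shaped by track_function_call()
            'api_requests': [],     # entries shaped by track_api_request()
            'rate_limits': [],      # entries shaped by track_rate_limit()
        },
        'metadata': {
            'timestamp': '2025-01-01T00:00:00',
            'machine_id': 'md5-fingerprint-from-vnai.scope.profile',
            'sync_count': 1,
            'trigger_reason': 'buffer_full',  # or 'periodic', 'manual', 'high_priority',
                                              # 'rate_limit_exceeded', 'function_error',
                                              # 'random_time_weighted'
            'environment': {},                # full inspector.examine() output
            'data_counts': {'function_calls': 0, 'api_requests': 0, 'rate_limits': 0},
        },
    }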
vnai/scope/__init__.py
ADDED
vnai/scope/profile.py
ADDED
@@ -0,0 +1,223 @@
+_W='type_confidence'
+_V='detected_type'
+_U='commercial_app'
+_T='version'
+_S='django'
+_R='fastapi'
+_Q='streamlit'
+_P='indicators'
+_O='commercial_indicators'
+_N='likely_commercial'
+_M='KAGGLE_KERNEL_RUN_TYPE'
+_L='machine_id'
+_K='domain'
+_J='.git'
+_I='backtesting'
+_H='commercial_probability'
+_G='timestamp'
+_F='business_hours_usage'
+_E='google.colab'
+_D='unknown'
+_C=False
+_B=None
+_A=True
+import os,sys,platform,uuid,hashlib,psutil,threading,time,importlib.metadata
+from datetime import datetime
+import subprocess
+from pathlib import Path
+class Inspector:
+    _instance=_B;_lock=_B
+    def __new__(cls):
+        import threading
+        if cls._lock is _B:cls._lock=threading.Lock()
+        with cls._lock:
+            if cls._instance is _B:cls._instance=super(Inspector,cls).__new__(cls);cls._instance._initialize()
+        return cls._instance
+    def _initialize(self):self.cache={};self.cache_ttl=3600;self.last_examination=0;self.machine_id=_B;self._colab_auth_triggered=_C;self.home_dir=Path.home();self.project_dir=self.home_dir/'.vnstock';self.project_dir.mkdir(exist_ok=_A);self.id_dir=self.project_dir/'id';self.id_dir.mkdir(exist_ok=_A);self.machine_id_path=self.id_dir/'machine_id.txt';self.examine()
+    def examine(self,force_refresh=_C):
+        D='script';C='terminal';B='hosting_service';A='environment';current_time=time.time()
+        if not force_refresh and current_time-self.last_examination<self.cache_ttl:return self.cache
+        info={_G:datetime.now().isoformat(),'python_version':platform.python_version(),'os_name':platform.system(),'platform':platform.platform()};info[_L]=self.fingerprint()
+        try:
+            import importlib.util;ipython_spec=importlib.util.find_spec('IPython')
+            if ipython_spec:
+                from IPython import get_ipython;ipython=get_ipython()
+                if ipython is not _B:
+                    info[A]='jupyter'
+                    if _E in sys.modules:info[B]='colab'
+                    elif _M in os.environ:info[B]='kaggle'
+                    else:info[B]='local_jupyter'
+                elif sys.stdout.isatty():info[A]=C
+                else:info[A]=D
+            elif sys.stdout.isatty():info[A]=C
+            else:info[A]=D
+        except:info[A]=_D
+        try:info['cpu_count']=os.cpu_count();info['memory_gb']=round(psutil.virtual_memory().total/1024**3,1)
+        except:pass
+        is_colab=_E in sys.modules
+        if is_colab:info['is_colab']=_A;self.detect_colab_with_delayed_auth()
+        try:info['commercial_usage']=self.enhanced_commercial_detection();info['project_context']=self.analyze_project_structure();info['git_info']=self.analyze_git_info();info['usage_pattern']=self.detect_usage_pattern();info['dependencies']=self.analyze_dependencies()
+        except Exception as e:info['detection_error']=str(e)
+        self.cache=info;self.last_examination=current_time;return info
+    def fingerprint(self):
+        if self.machine_id:return self.machine_id
+        if self.machine_id_path.exists():
+            try:
+                with open(self.machine_id_path,'r')as f:self.machine_id=f.read().strip();return self.machine_id
+            except:pass
+        is_colab=self.detect_colab_with_delayed_auth()
+        try:system_info=platform.node()+platform.platform()+platform.machine();self.machine_id=hashlib.md5(system_info.encode()).hexdigest()
+        except:self.machine_id=str(uuid.uuid4())
+        try:
+            with open(self.machine_id_path,'w')as f:f.write(self.machine_id)
+        except:pass
+        return self.machine_id
+    def detect_hosting(self):
+        A='Google Colab';hosting_markers={'COLAB_GPU':A,_M:'Kaggle','BINDER_SERVICE_HOST':'Binder','CODESPACE_NAME':'GitHub Codespaces','STREAMLIT_SERVER_HEADLESS':'Streamlit Cloud','CLOUD_SHELL':'Cloud Shell'}
+        for(env_var,host_name)in hosting_markers.items():
+            if env_var in os.environ:return host_name
+        if _E in sys.modules:return A
+        return'local'
+    def detect_commercial_usage(self):
+        F='client';E='enterprise';D='dir_patterns';C='env_vars';B='file_patterns';A='env_domains';commercial_indicators={A:['.com','.io','.co',E,'corp','inc'],B:['invoice','payment','customer',F,'product','sale'],C:['COMPANY','BUSINESS','ENTERPRISE','CORPORATE'],D:['company','business',E,'corporate',F]};env_values=' '.join(os.environ.values()).lower();domain_match=any(domain in env_values for domain in commercial_indicators[A]);env_var_match=any(var in os.environ for var in commercial_indicators[C]);current_dir=os.getcwd().lower();dir_match=any(pattern in current_dir for pattern in commercial_indicators[D])
+        try:files=[f.lower()for f in os.listdir()if os.path.isfile(f)];file_match=any(any(pattern in f for pattern in commercial_indicators[B])for f in files)
+        except:file_match=_C
+        indicators=[domain_match,env_var_match,dir_match,file_match];commercial_probability=sum(indicators)/len(indicators);return{_N:commercial_probability>.3,_H:commercial_probability,_O:{'domain_match':domain_match,'env_var_match':env_var_match,'dir_match':dir_match,'file_match':file_match}}
+    def scan_packages(self):
+        A='financetoolkit';package_groups={'vnstock_family':['vnstock','vnstock3','vnstock_ezchart','vnstock_data_pro','vnstock_market_data_pipeline','vnstock_ta','vnii','vnai'],'analytics':['openbb','pandas_ta'],'static_charts':['matplotlib','seaborn','altair'],'dashboard':[_Q,'voila','panel','shiny','dash'],'interactive_charts':['mplfinance','plotly','plotline','bokeh','pyecharts','highcharts-core','highcharts-stock','mplchart'],'datafeed':['yfinance','alpha_vantage','pandas-datareader','investpy'],'official_api':['ssi-fc-data','ssi-fctrading'],'risk_return':['pyfolio','empyrical','quantstats',A],'machine_learning':['scipy','sklearn','statsmodels','pytorch','tensorflow','keras','xgboost'],_P:['stochastic','talib','tqdm','finta',A,'tulipindicators'],_I:['vectorbt',_I,'bt','zipline','pyalgotrade','backtrader','pybacktest','fastquant','lean','ta','finmarketpy','qstrader'],'server':[_R,'flask','uvicorn','gunicorn'],'framework':['lightgbm','catboost',_S]};installed={}
+        for(category,packages)in package_groups.items():
+            installed[category]=[]
+            for pkg in packages:
+                try:version=importlib.metadata.version(pkg);installed[category].append({'name':pkg,_T:version})
+                except:pass
+        return installed
+    def setup_vnstock_environment(self):
+        env_file=self.id_dir/'environment.json';env_data={'accepted_agreement':_A,_G:datetime.now().isoformat(),_L:self.fingerprint()}
+        try:
+            with open(env_file,'w')as f:import json;json.dump(env_data,f)
+            return _A
+        except Exception as e:print(f"Failed to set up vnstock environment: {e}");return _C
+    def detect_colab_with_delayed_auth(self,immediate=_C):
+        is_colab=_E in sys.modules
+        if is_colab and not self._colab_auth_triggered:
+            if immediate:
+                self._colab_auth_triggered=_A;user_id=self.get_or_create_user_id()
+                if user_id and user_id!=self.machine_id:
+                    self.machine_id=user_id
+                    try:
+                        with open(self.machine_id_path,'w')as f:f.write(user_id)
+                    except:pass
+            else:
+                def delayed_auth():
+                    time.sleep(300);user_id=self.get_or_create_user_id()
+                    if user_id and user_id!=self.machine_id:
+                        self.machine_id=user_id
+                        try:
+                            with open(self.machine_id_path,'w')as f:f.write(user_id)
+                        except:pass
+                thread=threading.Thread(target=delayed_auth,daemon=_A);thread.start()
+        return is_colab
+    def get_or_create_user_id(self):
+        if self._colab_auth_triggered:return self.machine_id
+        try:
+            from google.colab import drive;print('\n📋 Authenticating to save your settings. This helps maintain your preferences across sessions.');print('This is a one-time setup for better experience with vnstock.\n');self._colab_auth_triggered=_A;drive.mount('/content/drive');id_path='/content/drive/MyDrive/.vnstock/user_id.txt'
+            if os.path.exists(id_path):
+                with open(id_path,'r')as f:return f.read().strip()
+            else:
+                user_id=str(uuid.uuid4());os.makedirs(os.path.dirname(id_path),exist_ok=_A)
+                with open(id_path,'w')as f:f.write(user_id)
+                return user_id
+        except Exception as e:return self.machine_id
+    def analyze_project_structure(self):
+        E='root_dirs';D='manage.py';C='wsgi.py';B='data_science';A='app.py';current_dir=os.getcwd();project_indicators={_U:['app','services','products','customers','billing'],'financial_tool':['portfolio',_I,'trading','strategy'],B:['models','notebooks','datasets','visualization'],'educational':['examples','lectures','assignments','slides']};project_type={}
+        for(category,markers)in project_indicators.items():
+            match_count=0
+            for marker in markers:
+                if os.path.exists(os.path.join(current_dir,marker)):match_count+=1
+            if len(markers)>0:project_type[category]=match_count/len(markers)
+        try:
+            root_files=[f for f in os.listdir(current_dir)if os.path.isfile(os.path.join(current_dir,f))];root_dirs=[d for d in os.listdir(current_dir)if os.path.isdir(os.path.join(current_dir,d))];file_markers={'python_project':['setup.py','pyproject.toml','requirements.txt'],B:['notebook.ipynb','.ipynb_checkpoints'],'web_app':[A,C,D,'server.py'],'finance_app':['portfolio.py','trading.py','backtest.py']};file_project_type=_D
+            for(ptype,markers)in file_markers.items():
+                if any(marker in root_files for marker in markers):file_project_type=ptype;break
+            frameworks=[];framework_markers={_S:[D,'settings.py'],'flask':[A,C],_Q:['streamlit_app.py',A],_R:['main.py',A]}
+            for(framework,markers)in framework_markers.items():
+                if any(marker in root_files for marker in markers):frameworks.append(framework)
+        except Exception as e:root_files=[];root_dirs=[];file_project_type=_D;frameworks=[]
+        return{'project_dir':current_dir,_V:max(project_type.items(),key=lambda x:x[1])[0]if project_type else _D,'file_type':file_project_type,'is_git_repo':_J in(root_dirs if E in locals()else[]),'frameworks':frameworks,'file_count':len(root_files)if'root_files'in locals()else 0,'directory_count':len(root_dirs)if E in locals()else 0,_W:project_type}
+    def analyze_git_info(self):
+        I='license_type';H='has_license';G='repo_path';F='rev-parse';E='/';D='has_git';C=':';B='git';A='@'
+        try:
+            result=subprocess.run([B,F,'--is-inside-work-tree'],capture_output=_A,text=_A)
+            if result.returncode!=0:return{D:_C}
+            repo_root=subprocess.run([B,F,'--show-toplevel'],capture_output=_A,text=_A);repo_path=repo_root.stdout.strip()if repo_root.stdout else _B;repo_name=os.path.basename(repo_path)if repo_path else _B;has_license=_C;license_type=_D
+            if repo_path:
+                license_files=[os.path.join(repo_path,'LICENSE'),os.path.join(repo_path,'LICENSE.txt'),os.path.join(repo_path,'LICENSE.md')]
+                for license_file in license_files:
+                    if os.path.exists(license_file):
+                        has_license=_A
+                        try:
+                            with open(license_file,'r')as f:
+                                content=f.read().lower()
+                                if'mit license'in content:license_type='MIT'
+                                elif'apache license'in content:license_type='Apache'
+                                elif'gnu general public'in content:license_type='GPL'
+                                elif'bsd 'in content:license_type='BSD'
+                        except:pass
+                        break
+            remote=subprocess.run([B,'config','--get','remote.origin.url'],capture_output=_A,text=_A);remote_url=remote.stdout.strip()if remote.stdout else _B
+            if remote_url:
+                remote_url=remote_url.strip();domain=_B
+                if remote_url:
+                    if remote_url.startswith('git@')or A in remote_url and C in remote_url.split(A)[1]:domain=remote_url.split(A)[1].split(C)[0]
+                    elif remote_url.startswith('http'):
+                        url_parts=remote_url.split('//')
+                        if len(url_parts)>1:
+                            auth_and_domain=url_parts[1].split(E,1)[0]
+                            if A in auth_and_domain:domain=auth_and_domain.split(A)[-1]
+                            else:domain=auth_and_domain
+                    else:
+                        import re;domain_match=re.search('@([^:/]+)|https?://(?:[^@/]+@)?([^/]+)',remote_url)
+                        if domain_match:domain=domain_match.group(1)or domain_match.group(2)
+                owner=_B;repo_name=_B
+                if domain:
+                    if'github'in domain:
+                        if C in remote_url and A in remote_url:
+                            parts=remote_url.split(C)[-1].split(E)
+                            if len(parts)>=2:owner=parts[0];repo_name=parts[1].replace(_J,'')
+                        else:
+                            url_parts=remote_url.split('//')
+                            if len(url_parts)>1:
+                                path_parts=url_parts[1].split(E)
+                                if len(path_parts)>=3:
+                                    domain_part=path_parts[0]
+                                    if A in domain_part:owner_index=1
+                                    else:owner_index=1
+                                    if len(path_parts)>owner_index:owner=path_parts[owner_index]
+                                    if len(path_parts)>owner_index+1:repo_name=path_parts[owner_index+1].replace(_J,'')
+            commit_count=subprocess.run([B,'rev-list','--count','HEAD'],capture_output=_A,text=_A);branch_count=subprocess.run([B,'branch','--list'],capture_output=_A,text=_A);branch_count=len(branch_count.stdout.strip().split('\n'))if branch_count.stdout else 0;return{_K:domain,'owner':owner,'commit_count':int(commit_count.stdout.strip())if commit_count.stdout else 0,'branch_count':branch_count,D:_A,G:repo_path if G in locals()else _B,'repo_name':repo_name,H:has_license if H in locals()else _C,I:license_type if I in locals()else _D}
+        except Exception as e:pass
+        return{D:_C}
+    def detect_usage_pattern(self):current_time=datetime.now();is_weekday=current_time.weekday()<5;hour=current_time.hour;is_business_hours=9<=hour<=18;return{_F:is_weekday and is_business_hours,'weekday':is_weekday,'hour':hour,_G:current_time.isoformat()}
+    def enhanced_commercial_detection(self):
+        basic=self.detect_commercial_usage()
+        try:
+            project_files=os.listdir(os.getcwd());commercial_frameworks=['django-oscar','opencart','magento','saleor','odoo','shopify','woocommerce'];framework_match=_C
+            for framework in commercial_frameworks:
+                if any(framework in f for f in project_files):framework_match=_A;break
+            db_files=[f for f in project_files if'database'in f.lower()or'db_config'in f.lower()or f.endswith('.db')];has_database=len(db_files)>0
+        except:framework_match=_C;has_database=_C
+        domain_check=self.analyze_git_info();domain_is_commercial=_C
+        if domain_check and domain_check.get(_K):commercial_tlds=['.com','.io','.co','.org','.net'];domain_is_commercial=any(tld in domain_check[_K]for tld in commercial_tlds)
+        project_structure=self.analyze_project_structure();indicators=[basic[_H],framework_match,has_database,domain_is_commercial,project_structure.get(_W,{}).get(_U,0),self.detect_usage_pattern()[_F]];indicators=[i for i in indicators if i is not _B]
+        if indicators:score=sum(1. if isinstance(i,bool)and i else i if isinstance(i,(int,float))else 0 for i in indicators)/len(indicators)
+        else:score=0
+        return{_H:score,_N:score>.4,_P:{'basic_indicators':basic[_O],'framework_match':framework_match,'has_database':has_database,'domain_is_commercial':domain_is_commercial,'project_structure':project_structure.get(_V),_F:self.detect_usage_pattern()[_F]}}
+    def analyze_dependencies(self):
+        A='has_commercial_deps'
+        try:
+            import pkg_resources;enterprise_packages=['snowflake-connector-python','databricks','azure','aws','google-cloud','stripe','atlassian','salesforce','bigquery','tableau','sap'];commercial_deps=[]
+            for pkg in pkg_resources.working_set:
+                if any(ent in pkg.key for ent in enterprise_packages):commercial_deps.append({'name':pkg.key,_T:pkg.version})
+            return{A:len(commercial_deps)>0,'commercial_deps_count':len(commercial_deps),'commercial_deps':commercial_deps}
+        except:return{A:_C}
+inspector=Inspector()
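The identifier every other module keys on comes from fingerprint(). A stripped-down sketch of the same derivation (the real method also caches the value in ~/.vnstock/id/machine_id.txt and, on Colab, later swaps in a UUID persisted to the user's Google Drive):

    import hashlib, platform, uuid

    def fingerprint():
        # Same inputs as Inspector.fingerprint(): node name + platform + machine arch.
        try:
            system_info = platform.node() + platform.platform() + platform.machine()
            return hashlib.md5(system_info.encode()).hexdigest()
        except Exception:
            return str(uuid.uuid4())  # fallback mirrors the except branch above

    print(fingerprint())  # a stable 32-character hex digest per machine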
vnai/scope/promo.py
ADDED
@@ -0,0 +1,56 @@
+_A=None
+import requests
+from datetime import datetime
+import random,threading,time
+class ContentManager:
+    _instance=_A;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is _A:A._instance=super(ContentManager,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.last_display=0;A.display_interval=86400;A.content_url='https://vnstocks.com/files/package_ads.html';A.target_url='https://vnstocks.com/lp-khoa-hoc-python-chung-khoan';A.image_url='https://course.learn-anything.vn/wp-content/uploads/2025/03/cta-python-chung-khoan-k10-simple.jpg';A._start_periodic_display()
+    def _start_periodic_display(A):
+        def B():
+            while True:
+                B=random.randint(7200,21600);time.sleep(B);C=time.time()
+                if C-A.last_display>=A.display_interval:A.present_content()
+        C=threading.Thread(target=B,daemon=True);C.start()
+    def fetch_remote_content(B):
+        try:
+            A=requests.get(B.content_url,timeout=3)
+            if A.status_code==200:return A.text
+            return
+        except:return
+    def present_content(A,environment=_A):
+        B=environment;A.last_display=time.time()
+        if B is _A:from vnai.scope.profile import inspector as F;B=F.examine().get('environment','unknown')
+        D=A.fetch_remote_content();G=f'''
+<a href="{A.target_url}">
+    <img src="{A.image_url}"
+         alt="Khóa học Python Chứng khoán" style="max-width: 100%; border-radius: 4px;">
+</a>
+''';H=f"[]({A.target_url})";I=f"""
+╔══════════════════════════════════════════════════════════╗
+║                                                          ║
+║   🚀 Phân tích dữ liệu & tạo bot chứng khoán K10         ║
+║                                                          ║
+║   ✓ Khai giảng khóa mới từ 23/3/2025                     ║
+║   ✓ Tạo bot python đầu tư từ số 0                        ║
+║   ✓ Học 10 buổi chiều Chủ Nhật                           ║
+║                                                          ║
+║   → Đăng ký ngay: {A.target_url}                         ║
+║                                                          ║
+╚══════════════════════════════════════════════════════════╝
+"""
+        if B=='jupyter':
+            try:
+                from IPython.display import display as C,HTML as E,Markdown as J
+                if D:C(E(D))
+                else:
+                    try:C(J(H))
+                    except:C(E(G))
+            except:pass
+        elif B=='terminal':print(I)
+        else:print(f"🚀 Phân tích dữ liệu & tạo bot chứng khoán K10 - Đăng ký: {A.target_url}")
+manager=ContentManager()
+def present():return manager.present_content()
vnai/scope/state.py
ADDED
@@ -0,0 +1,74 @@
+_L='minimal'
+_K='warnings'
+_J='api_requests'
+_I='last_error_time'
+_H='startup_time'
+_G='standard'
+_F='function_calls'
+_E='peak_memory'
+_D='errors'
+_C=True
+_B=None
+_A='execution_times'
+import time,threading,json,os
+from datetime import datetime
+from pathlib import Path
+class Tracker:
+    _instance=_B;_lock=threading.Lock()
+    def __new__(A):
+        with A._lock:
+            if A._instance is _B:A._instance=super(Tracker,A).__new__(A);A._instance._initialize()
+        return A._instance
+    def _initialize(A):A.metrics={_H:datetime.now().isoformat(),_F:0,_J:0,_D:0,_K:0};A.performance_metrics={_A:[],_I:_B,_E:0};A.privacy_level=_G;A.home_dir=Path.home();A.project_dir=A.home_dir/'.vnstock';A.project_dir.mkdir(exist_ok=_C);A.data_dir=A.project_dir/'data';A.data_dir.mkdir(exist_ok=_C);A.metrics_path=A.data_dir/'usage_metrics.json';A.privacy_config_path=A.project_dir/'config'/'privacy.json';os.makedirs(os.path.dirname(A.privacy_config_path),exist_ok=_C);A._load_metrics();A._load_privacy_settings();A._start_background_collector()
+    def _load_metrics(A):
+        if A.metrics_path.exists():
+            try:
+                with open(A.metrics_path,'r')as C:D=json.load(C)
+                for(B,E)in D.items():
+                    if B in A.metrics:A.metrics[B]=E
+            except:pass
+    def _save_metrics(A):
+        try:
+            with open(A.metrics_path,'w')as B:json.dump(A.metrics,B)
+        except:pass
+    def _load_privacy_settings(A):
+        if A.privacy_config_path.exists():
+            try:
+                with open(A.privacy_config_path,'r')as B:C=json.load(B);A.privacy_level=C.get('level',_G)
+            except:pass
+    def setup_privacy(B,level=_B):
+        A=level;C={_L:'Essential system data only',_G:'Performance metrics and errors','enhanced':'Detailed operation analytics'}
+        if A is _B:A=_G
+        if A not in C:raise ValueError(f"Invalid privacy level: {A}. Choose from {', '.join(C.keys())}")
+        B.privacy_level=A
+        with open(B.privacy_config_path,'w')as D:json.dump({'level':A},D)
+        return A
+    def get_privacy_level(A):return A.privacy_level
+    def _start_background_collector(A):
+        def B():
+            while _C:
+                try:
+                    import psutil as C;D=C.Process();E=D.memory_info();B=E.rss/1048576
+                    if B>A.performance_metrics[_E]:A.performance_metrics[_E]=B
+                    A._save_metrics()
+                except:pass
+                time.sleep(300)
+        C=threading.Thread(target=B,daemon=_C);C.start()
+    def record(A,event_type,data=_B):
+        D='execution_time';C=data;B=event_type
+        if A.privacy_level==_L and B!=_D:return _C
+        if B in A.metrics:A.metrics[B]+=1
+        else:A.metrics[B]=1
+        if B==_D:A.performance_metrics[_I]=datetime.now().isoformat()
+        if B==_F and C and D in C:
+            A.performance_metrics[_A].append(C[D])
+            if len(A.performance_metrics[_A])>100:A.performance_metrics[_A]=A.performance_metrics[_A][-100:]
+        if A.metrics[_F]%100==0 or B==_D:A._save_metrics()
+        return _C
+    def get_metrics(A):
+        B=0
+        if A.performance_metrics[_A]:B=sum(A.performance_metrics[_A])/len(A.performance_metrics[_A])
+        C=A.metrics.copy();C.update({'avg_execution_time':B,'peak_memory_mb':A.performance_metrics[_E],'uptime':(datetime.now()-datetime.fromisoformat(A.metrics[_H])).total_seconds(),'privacy_level':A.privacy_level});return C
+    def reset(A):A.metrics={_H:datetime.now().isoformat(),_F:0,_J:0,_D:0,_K:0};A.performance_metrics={_A:[],_I:_B,_E:0};A._save_metrics();return _C
+tracker=Tracker()
+def record(event_type,data=_B):return tracker.record(event_type,data)
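The privacy switch accepts exactly three levels and persists the choice to ~/.vnstock/config/privacy.json. A short sketch of the API above:

    from vnai.scope.state import tracker

    tracker.setup_privacy('minimal')    # 'Essential system data only'
    tracker.setup_privacy('standard')   # 'Performance metrics and errors' (default)
    tracker.setup_privacy('enhanced')   # 'Detailed operation analytics'
    # Note: even at 'minimal', Tracker.record() still counts 'errors' events.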
vnai-2.0.dist-info/METADATA
ADDED
@@ -0,0 +1,30 @@
+Metadata-Version: 2.2
+Name: vnai
+Version: 2.0
+Summary: System optimization and resource management toolkit
+Home-page:
+Author:
+Author-email:
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+Requires-Dist: requests>=2.25.0
+Requires-Dist: psutil>=5.8.0
+Requires-Dist: cryptography>=3.4.0
+Provides-Extra: dev
+Requires-Dist: pytest>=6.0.0; extra == "dev"
+Requires-Dist: black>=21.5b2; extra == "dev"
+Dynamic: classifier
+Dynamic: description
+Dynamic: description-content-type
+Dynamic: provides-extra
+Dynamic: requires-dist
+Dynamic: requires-python
+Dynamic: summary
+
+System resource management and performance optimization toolkit
vnai-2.0.dist-info/RECORD
ADDED
@@ -0,0 +1,16 @@
+vnai/__init__.py,sha256=htWSy730Vm-viAxYTJ2tsq4J7_-jOJsJ9dEl1r8AtFI,4668
+vnai/beam/__init__.py,sha256=MG_4FkhQZyuKeaoQQh-KaULhxcGu370lGR6FVV5blU4,129
+vnai/beam/metrics.py,sha256=nVKoe0SJg0TNCPvAOaE99ZyBJ9Tw5WSRydyAObH8FrA,2709
+vnai/beam/pulse.py,sha256=5e21Ky6rVKD57bf8BzSA2lGzwHhohansoQzggrnEpIE,1580
+vnai/beam/quota.py,sha256=arII5Dl0hLpAfvThpOHJNRFAJnMbtEmfAMJOLemBiKg,2345
+vnai/flow/__init__.py,sha256=BURTo8cXicmqqTbeB0qfXwVole0oGDVp_UxRSeh4qfA,80
+vnai/flow/queue.py,sha256=pvC_HSjctR62Uzt4b3h3EANJXmrkKBm3iiNVIrlTnJA,1912
+vnai/flow/relay.py,sha256=RU-paE3HVkgodPmNlAI8fAoVUcwufegY1WmsGL-sWpY,6676
+vnai/scope/__init__.py,sha256=o7N7JjgSqIfQeDojgnxzV9gthEWL3YxxljnvRO9AXkQ,196
+vnai/scope/profile.py,sha256=Oi8kZ_7wnSAyW7MkU2NZuw5cxzSRrRv8v61SkgX0zrc,14941
+vnai/scope/promo.py,sha256=eOySqZ4XnxLbRyD6uKhYnJFSIwaDJ7T8hbJynuqvfho,2958
+vnai/scope/state.py,sha256=JkVwJv8l_-ef201I_O1PHqFyp8KJ3VWyfmZnCltH18c,3283
+vnai-2.0.dist-info/METADATA,sha256=ibGNnz6wpZ9xoGw0T45707a5_WWGpz6uCA3hieYlbCM,947
+vnai-2.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+vnai-2.0.dist-info/top_level.txt,sha256=4zI0qZHePCwvgSqXl4420sBcd0VzZn4MEcRsAIFae3k,5
+vnai-2.0.dist-info/RECORD,,
vnai-0.1.4.dist-info/METADATA
DELETED
@@ -1,19 +0,0 @@
-Metadata-Version: 2.1
-Name: vnai
-Version: 0.1.4
-Summary: :))
-Author: Vnstock HQ
-Author-email: support@vnstock.site
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: License :: OSI Approved :: MIT License
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
-Requires-Dist: requests
-Requires-Dist: cryptography
-Requires-Dist: psutil
-Provides-Extra: dev
-Requires-Dist: pytest; extra == "dev"
-Requires-Dist: pytest-cov; extra == "dev"
vnai-0.1.4.dist-info/RECORD
DELETED
@@ -1,5 +0,0 @@
-vnai/__init__.py,sha256=k6Qn1-1DeiTMHs0TZk4tLwLzAb35qxcjhjg4fJQ046A,12245
-vnai-0.1.4.dist-info/METADATA,sha256=tosZBcT5xYtgGkkKCyPf77H9LsqA0ja04vg-zByIcrQ,631
-vnai-0.1.4.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-vnai-0.1.4.dist-info/top_level.txt,sha256=4zI0qZHePCwvgSqXl4420sBcd0VzZn4MEcRsAIFae3k,5
-vnai-0.1.4.dist-info/RECORD,,
{vnai-0.1.4.dist-info → vnai-2.0.dist-info}/top_level.txt
File without changes