vnai 2.0.1__py3-none-any.whl → 2.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +72 -261
- vnai/beam/__init__.py +2 -5
- vnai/beam/metrics.py +57 -182
- vnai/beam/pulse.py +29 -107
- vnai/beam/quota.py +102 -473
- vnai/flow/__init__.py +2 -5
- vnai/flow/queue.py +55 -131
- vnai/flow/relay.py +149 -439
- vnai/scope/__init__.py +2 -5
- vnai/scope/profile.py +219 -763
- vnai/scope/promo.py +55 -215
- vnai/scope/state.py +71 -220
- {vnai-2.0.1.dist-info → vnai-2.0.2.dist-info}/METADATA +3 -2
- vnai-2.0.2.dist-info/RECORD +16 -0
- {vnai-2.0.1.dist-info → vnai-2.0.2.dist-info}/WHEEL +1 -1
- vnai-2.0.1.dist-info/RECORD +0 -16
- {vnai-2.0.1.dist-info → vnai-2.0.2.dist-info}/top_level.txt +0 -0
vnai/flow/queue.py
CHANGED
@@ -1,134 +1,58 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
|
4
|
-
import time
|
5
|
-
import threading
|
6
|
-
import json
|
1
|
+
_C='category'
|
2
|
+
_B=True
|
3
|
+
_A=None
|
4
|
+
import time,threading,json
|
7
5
|
from datetime import datetime
|
8
6
|
from pathlib import Path
|
9
|
-
|
10
7
|
class Buffer:
    """Thread-safe, size-bounded in-memory event buffer with JSON backup.

    Implemented as a process-wide singleton: every call to ``Buffer()``
    returns the same instance. Items are kept in a plain list, trimmed to
    the newest ``max_size`` entries, and periodically checkpointed to
    ``~/.vnstock/data/buffer_backup.json`` by a background daemon thread.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Serialize singleton creation so concurrent first calls cannot
        # construct two instances.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Buffer, cls).__new__(cls)
                cls._instance._initialize()
            return cls._instance

    def _initialize(self):
        """Set up storage, backup paths, and the periodic backup thread."""
        self.data = []
        # RLock, not Lock: add() and clear() invoke _save_to_backup() while
        # already holding this lock; with a plain Lock the nested acquire
        # inside _save_to_backup() would deadlock.
        self.lock = threading.RLock()
        self.max_size = 1000
        self.backup_interval = 300  # seconds between periodic backups
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / '.vnstock'
        self.project_dir.mkdir(parents=True, exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(parents=True, exist_ok=True)
        self.backup_path = self.data_dir / 'buffer_backup.json'
        self._load_from_backup()
        self._start_backup_thread()

    def _load_from_backup(self):
        """Restore buffer contents from the backup file, best-effort.

        A missing, unreadable, or corrupt backup leaves the buffer empty
        rather than raising.
        """
        if self.backup_path.exists():
            try:
                with open(self.backup_path, 'r') as f:
                    restored = json.load(f)
                with self.lock:
                    self.data = restored
            except (OSError, ValueError):
                # ValueError covers json.JSONDecodeError; backup is advisory.
                pass

    def _save_to_backup(self):
        """Persist current buffer contents to disk, best-effort.

        No-op when the buffer is empty. Disk errors are swallowed on
        purpose: the backup must never break callers.
        """
        with self.lock:
            if not self.data:
                return
            try:
                with open(self.backup_path, 'w') as f:
                    json.dump(self.data, f)
            except (OSError, TypeError, ValueError):
                # TypeError/ValueError: non-serializable item slipped in.
                pass

    def _start_backup_thread(self):
        """Launch the daemon thread that periodically saves the buffer."""
        def backup_task():
            while True:
                time.sleep(self.backup_interval)
                self._save_to_backup()

        threading.Thread(target=backup_task, daemon=True).start()

    def add(self, item, category=None):
        """Append *item* to the buffer and return the new buffer size.

        Dict items get an ISO ``timestamp`` (if absent) and, when given,
        a ``category`` key. The buffer is trimmed to the newest
        ``max_size`` entries and checkpointed every 100 items.
        """
        with self.lock:
            if isinstance(item, dict):
                if 'timestamp' not in item:
                    item['timestamp'] = datetime.now().isoformat()
                if category:
                    item['category'] = category
            self.data.append(item)
            if len(self.data) > self.max_size:
                self.data = self.data[-self.max_size:]
            if len(self.data) % 100 == 0:
                # Safe re-entry: self.lock is an RLock (see _initialize).
                self._save_to_backup()
            return len(self.data)

    def get(self, count=None, category=None):
        """Return up to *count* items, optionally filtered by *category*.

        ``count=None`` returns every matching item; ``count=0`` correctly
        returns an empty list (the previous truthiness check returned all).
        """
        with self.lock:
            if category:
                selected = [it for it in self.data if it.get('category') == category]
            else:
                selected = self.data.copy()
            return selected[:count] if count is not None else selected

    def clear(self, category=None):
        """Remove all items (or only one *category*); return remaining size."""
        with self.lock:
            if category:
                self.data = [it for it in self.data if it.get('category') != category]
            else:
                self.data = []
            self._save_to_backup()
            return len(self.data)

    def size(self, category=None):
        """Return the number of buffered items, optionally per *category*."""
        with self.lock:
            if category:
                return sum(1 for it in self.data if it.get('category') == category)
            return len(self.data)


# Module-level singleton used throughout the package.
buffer = Buffer()
|