vnai 2.1.8__py3-none-any.whl → 2.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +37 -110
- vnai/beam/__init__.py +0 -2
- vnai/beam/metrics.py +48 -99
- vnai/beam/pulse.py +24 -53
- vnai/beam/quota.py +94 -247
- vnai/flow/__init__.py +1 -4
- vnai/flow/queue.py +17 -50
- vnai/flow/relay.py +98 -204
- vnai/scope/__init__.py +1 -4
- vnai/scope/profile.py +231 -417
- vnai/scope/promo.py +41 -123
- vnai/scope/state.py +52 -119
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/METADATA +1 -1
- vnai-2.1.9.dist-info/RECORD +16 -0
- vnai-2.1.8.dist-info/RECORD +0 -16
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/WHEEL +0 -0
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/top_level.txt +0 -0
vnai/flow/queue.py
CHANGED
@@ -1,5 +1,3 @@
|
|
1
|
-
# vnai/flow/queue.py
|
2
|
-
|
3
1
|
import time
|
4
2
|
import threading
|
5
3
|
import json
|
@@ -7,127 +5,96 @@ from datetime import datetime
|
|
7
5
|
from pathlib import Path
|
8
6
|
|
9
7
|
class Buffer:
    """Thread-safe singleton buffer with JSON-file persistence.

    Accumulates items (typically dicts) in memory, caps retention at
    ``max_size`` entries, and opportunistically persists them to
    ``~/.vnstock/data/buffer_backup.json`` — on every 100th add, on clear,
    and from a background daemon thread every ``backup_interval`` seconds.
    All persistence is best-effort: I/O errors are swallowed by design.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Lock-guarded singleton: the class-level lock serializes first
        # construction; subsequent calls return the cached instance.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Buffer, cls).__new__(cls)
                cls._instance._initialize()
            return cls._instance

    def _initialize(self):
        """Set up in-memory state, backup paths, and the background saver."""
        self.data = []
        # RLock, not Lock: add() and clear() invoke _save_to_backup() while
        # already holding this lock, and _save_to_backup() re-acquires it.
        # A plain (non-reentrant) Lock deadlocks on that nested acquire.
        self.lock = threading.RLock()
        self.max_size = 1000           # cap on retained items (oldest dropped)
        self.backup_interval = 300     # seconds between background saves
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / ".vnstock"
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.backup_path = self.data_dir / "buffer_backup.json"
        self._load_from_backup()
        self._start_backup_thread()

    def _load_from_backup(self):
        """Best-effort restore of buffered items from the backup file."""
        if self.backup_path.exists():
            try:
                with open(self.backup_path, 'r') as f:
                    backup_data = json.load(f)
                with self.lock:
                    self.data = backup_data
            except Exception:
                # Corrupt or unreadable backup is not fatal; start empty.
                pass

    def _save_to_backup(self):
        """Best-effort persist of the current buffer to the backup file."""
        with self.lock:
            if not self.data:
                return
            try:
                with open(self.backup_path, 'w') as f:
                    json.dump(self.data, f)
            except Exception:
                # Persistence is opportunistic; ignore disk/permission errors.
                pass

    def _start_backup_thread(self):
        """Spawn a daemon thread saving the buffer every backup_interval s."""
        def backup_task():
            while True:
                time.sleep(self.backup_interval)
                self._save_to_backup()

        backup_thread = threading.Thread(target=backup_task, daemon=True)
        backup_thread.start()

    def add(self, item, category=None):
        """Append *item* to the buffer and return the new buffer length.

        Dict items are stamped with an ISO ``timestamp`` (if absent) and the
        given ``category``. The buffer is trimmed to the newest ``max_size``
        entries, and a checkpoint is written every 100th item.
        """
        with self.lock:
            if isinstance(item, dict):
                if "timestamp" not in item:
                    item["timestamp"] = datetime.now().isoformat()
                if category:
                    item["category"] = category
            self.data.append(item)
            if len(self.data) > self.max_size:
                self.data = self.data[-self.max_size:]
            if len(self.data) % 100 == 0:
                # Nested acquire is safe because self.lock is an RLock.
                self._save_to_backup()
            return len(self.data)

    def get(self, count=None, category=None):
        """Return a snapshot of buffered items, optionally filtered.

        NOTE: ``count`` is truthiness-tested, so ``count=0`` returns all
        items (preserved legacy behavior).
        """
        with self.lock:
            if category:
                filtered_data = [item for item in self.data if item.get("category") == category]
            else:
                filtered_data = self.data.copy()
            if count:
                return filtered_data[:count]
            else:
                return filtered_data

    def clear(self, category=None):
        """Remove items (all, or only the given category); return new size."""
        with self.lock:
            if category:
                self.data = [item for item in self.data if item.get("category") != category]
            else:
                self.data = []
            # Safe nested acquire via RLock; persists the post-clear state.
            self._save_to_backup()
            return len(self.data)

    def size(self, category=None):
        """Return the number of buffered items, optionally per category."""
        with self.lock:
            if category:
                return len([item for item in self.data if item.get("category") == category])
            else:
                return len(self.data)
|
# Module-level singleton: importing this module constructs (or reuses) the
# shared Buffer, which creates ~/.vnstock/data, loads any existing backup,
# and starts the background backup daemon thread as side effects.
buffer = Buffer()