vnai 2.1.7__py3-none-any.whl → 2.1.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vnai/flow/__init__.py CHANGED
@@ -1,2 +1,5 @@
1
- from vnai.flow.relay import conduit, configure
2
- from vnai.flow.queue import buffer
1
+ # vnai/flow/__init__.py
2
+ # Data flow and transmission management
3
+
4
+ from vnai.flow.relay import conduit
5
+ from vnai.flow.queue import buffer
vnai/flow/queue.py CHANGED
@@ -1,100 +1,133 @@
1
- import time
2
- import threading
3
- import json
4
- from datetime import datetime
5
- from pathlib import Path
6
-
7
- class Buffer:
8
- _instance = None
9
- _lock = threading.Lock()
10
-
11
- def __new__(cls):
12
- with cls._lock:
13
- if cls._instance is None:
14
- cls._instance = super(Buffer, cls).__new__(cls)
15
- cls._instance._initialize()
16
- return cls._instance
17
-
18
- def _initialize(self):
19
- self.data = []
20
- self.lock = threading.Lock()
21
- self.max_size = 1000
22
- self.backup_interval = 300
23
- self.home_dir = Path.home()
24
- self.project_dir = self.home_dir /".vnstock"
25
- self.project_dir.mkdir(exist_ok=True)
26
- self.data_dir = self.project_dir /'data'
27
- self.data_dir.mkdir(exist_ok=True)
28
- self.backup_path = self.data_dir /"buffer_backup.json"
29
- self._load_from_backup()
30
- self._start_backup_thread()
31
-
32
- def _load_from_backup(self):
33
- if self.backup_path.exists():
34
- try:
35
- with open(self.backup_path,'r') as f:
36
- backup_data = json.load(f)
37
- with self.lock:
38
- self.data = backup_data
39
- except:
40
- pass
41
-
42
- def _save_to_backup(self):
43
- with self.lock:
44
- if not self.data:
45
- return
46
- try:
47
- with open(self.backup_path,'w') as f:
48
- json.dump(self.data, f)
49
- except:
50
- pass
51
-
52
- def _start_backup_thread(self):
53
- def backup_task():
54
- while True:
55
- time.sleep(self.backup_interval)
56
- self._save_to_backup()
57
- backup_thread = threading.Thread(target=backup_task, daemon=True)
58
- backup_thread.start()
59
-
60
- def add(self, item, category=None):
61
- with self.lock:
62
- if isinstance(item, dict):
63
- if"timestamp" not in item:
64
- item["timestamp"] = datetime.now().isoformat()
65
- if category:
66
- item["category"] = category
67
- self.data.append(item)
68
- if len(self.data) > self.max_size:
69
- self.data = self.data[-self.max_size:]
70
- if len(self.data) % 100 == 0:
71
- self._save_to_backup()
72
- return len(self.data)
73
-
74
- def get(self, count=None, category=None):
75
- with self.lock:
76
- if category:
77
- filtered_data = [item for item in self.data if item.get("category") == category]
78
- else:
79
- filtered_data = self.data.copy()
80
- if count:
81
- return filtered_data[:count]
82
- else:
83
- return filtered_data
84
-
85
- def clear(self, category=None):
86
- with self.lock:
87
- if category:
88
- self.data = [item for item in self.data if item.get("category") != category]
89
- else:
90
- self.data = []
91
- self._save_to_backup()
92
- return len(self.data)
93
-
94
- def size(self, category=None):
95
- with self.lock:
96
- if category:
97
- return len([item for item in self.data if item.get("category") == category])
98
- else:
99
- return len(self.data)
100
- buffer = Buffer()
1
+ # vnai/flow/queue.py
2
+
3
+ import time
4
+ import threading
5
+ import json
6
+ from datetime import datetime
7
+ from pathlib import Path
8
+
9
class Buffer:
    """Singleton in-memory buffer of dict records with best-effort JSON persistence.

    Items are appended in memory (capped at ``max_size``) and flushed to
    ``~/.vnstock/data/buffer_backup.json`` by a background daemon thread,
    on every 100th ``add``, and on ``clear``.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls):
        # Class-level lock guards one-time creation of the singleton.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Buffer, cls).__new__(cls)
                cls._instance._initialize()
            return cls._instance

    def _initialize(self):
        """Initialize state, storage paths, and the periodic backup thread."""
        self.data = []
        # BUGFIX: must be an RLock. add() and clear() call _save_to_backup()
        # while already holding this lock, and _save_to_backup() re-acquires
        # it; a plain (non-reentrant) Lock deadlocks on every clear() and on
        # the 100th add().
        self.lock = threading.RLock()
        self.max_size = 1000
        self.backup_interval = 300  # seconds (5 minutes)

        # Setup data directory: ~/.vnstock/data/buffer_backup.json
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / ".vnstock"
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.backup_path = self.data_dir / "buffer_backup.json"

        # Load from backup if exists
        self._load_from_backup()

        # Start backup thread
        self._start_backup_thread()

    def _load_from_backup(self):
        """Restore buffered items from the backup file (best-effort)."""
        if self.backup_path.exists():
            try:
                with open(self.backup_path, 'r') as f:
                    backup_data = json.load(f)

                with self.lock:
                    self.data = backup_data
            except (OSError, ValueError):
                # Unreadable or corrupt backup (json.JSONDecodeError is a
                # ValueError): start fresh rather than crash at import time.
                pass

    def _save_to_backup(self):
        """Persist the current buffer to the backup file (best-effort)."""
        with self.lock:
            if not self.data:
                return

            try:
                with open(self.backup_path, 'w') as f:
                    json.dump(self.data, f)
            except (OSError, TypeError, ValueError):
                # Persistence must never crash the caller; drop this flush.
                pass

    def _start_backup_thread(self):
        """Start a daemon thread that flushes the buffer every backup_interval."""
        def backup_task():
            while True:
                time.sleep(self.backup_interval)
                self._save_to_backup()

        backup_thread = threading.Thread(target=backup_task, daemon=True)
        backup_thread.start()

    def add(self, item, category=None):
        """Append *item* to the buffer and return the new buffer length.

        Dict items get a ``timestamp`` (ISO-8601, if missing) and, when
        *category* is given, a ``category`` key.
        """
        with self.lock:
            # Add metadata to dict items only; other types pass through as-is.
            if isinstance(item, dict):
                if "timestamp" not in item:
                    item["timestamp"] = datetime.now().isoformat()
                if category:
                    item["category"] = category

            self.data.append(item)

            # Trim oldest entries once the cap is exceeded.
            if len(self.data) > self.max_size:
                self.data = self.data[-self.max_size:]

            # Periodic flush every 100 items (safe: lock is reentrant).
            if len(self.data) % 100 == 0:
                self._save_to_backup()

            return len(self.data)

    def get(self, count=None, category=None):
        """Return buffered items, optionally filtered by category and limited to *count*."""
        with self.lock:
            if category:
                filtered_data = [item for item in self.data if item.get("category") == category]
            else:
                filtered_data = self.data.copy()

            # NOTE: count=0 is falsy and returns everything (preserved behavior).
            if count:
                return filtered_data[:count]
            else:
                return filtered_data

    def clear(self, category=None):
        """Remove all items (or only *category* items), flush, and return the new length."""
        with self.lock:
            if category:
                self.data = [item for item in self.data if item.get("category") != category]
            else:
                self.data = []

            self._save_to_backup()
            return len(self.data)

    def size(self, category=None):
        """Return the number of buffered items, optionally restricted to *category*."""
        with self.lock:
            if category:
                return len([item for item in self.data if item.get("category") == category])
            else:
                return len(self.data)
131
+
132
+ # Create singleton instance
133
+ buffer = Buffer()