vnai 2.0.2__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vnai/flow/queue.py CHANGED
@@ -1,58 +1,134 @@
1
- _C='category'
2
- _B=True
3
- _A=None
4
- import time,threading,json
1
+ # vnai/flow/queue.py
2
+ # Data buffering system
3
+
4
+ import time
5
+ import threading
6
+ import json
5
7
  from datetime import datetime
6
8
  from pathlib import Path
9
+
7
10
  class Buffer:
8
- _instance=_A;_lock=threading.Lock()
9
- def __new__(A):
10
- with A._lock:
11
- if A._instance is _A:A._instance=super(Buffer,A).__new__(A);A._instance._initialize()
12
- return A._instance
13
- def _initialize(A):A.data=[];A.lock=threading.Lock();A.max_size=1000;A.backup_interval=300;A.home_dir=Path.home();A.project_dir=A.home_dir/'.vnstock';A.project_dir.mkdir(exist_ok=_B);A.data_dir=A.project_dir/'data';A.data_dir.mkdir(exist_ok=_B);A.backup_path=A.data_dir/'buffer_backup.json';A._load_from_backup();A._start_backup_thread()
14
- def _load_from_backup(A):
15
- if A.backup_path.exists():
16
- try:
17
- with open(A.backup_path,'r')as B:C=json.load(B)
18
- with A.lock:A.data=C
19
- except:pass
20
- def _save_to_backup(A):
21
- with A.lock:
22
- if not A.data:return
23
- try:
24
- with open(A.backup_path,'w')as B:json.dump(A.data,B)
25
- except:pass
26
- def _start_backup_thread(A):
27
- def B():
28
- while _B:time.sleep(A.backup_interval);A._save_to_backup()
29
- C=threading.Thread(target=B,daemon=_B);C.start()
30
- def add(A,item,category=_A):
31
- D='timestamp';C=category;B=item
32
- with A.lock:
33
- if isinstance(B,dict):
34
- if D not in B:B[D]=datetime.now().isoformat()
35
- if C:B[_C]=C
36
- A.data.append(B)
37
- if len(A.data)>A.max_size:A.data=A.data[-A.max_size:]
38
- if len(A.data)%100==0:A._save_to_backup()
39
- return len(A.data)
40
- def get(A,count=_A,category=_A):
41
- D=category;C=count
42
- with A.lock:
43
- if D:B=[A for A in A.data if A.get(_C)==D]
44
- else:B=A.data.copy()
45
- if C:return B[:C]
46
- else:return B
47
- def clear(A,category=_A):
48
- B=category
49
- with A.lock:
50
- if B:A.data=[A for A in A.data if A.get(_C)!=B]
51
- else:A.data=[]
52
- A._save_to_backup();return len(A.data)
53
- def size(A,category=_A):
54
- B=category
55
- with A.lock:
56
- if B:return len([A for A in A.data if A.get(_C)==B])
57
- else:return len(A.data)
58
- buffer=Buffer()
11
+ """Manages data buffering with persistence"""
12
+
13
+ _instance = None
14
+ _lock = threading.Lock()
15
+
16
+ def __new__(cls):
17
+ with cls._lock:
18
+ if cls._instance is None:
19
+ cls._instance = super(Buffer, cls).__new__(cls)
20
+ cls._instance._initialize()
21
+ return cls._instance
22
+
23
+ def _initialize(self):
24
+ """Initialize buffer"""
25
+ self.data = []
26
+ self.lock = threading.Lock()
27
+ self.max_size = 1000
28
+ self.backup_interval = 300 # 5 minutes
29
+
30
+ # Setup data directory
31
+ self.home_dir = Path.home()
32
+ self.project_dir = self.home_dir / ".vnstock"
33
+ self.project_dir.mkdir(exist_ok=True)
34
+ self.data_dir = self.project_dir / 'data'
35
+ self.data_dir.mkdir(exist_ok=True)
36
+ self.backup_path = self.data_dir / "buffer_backup.json"
37
+
38
+ # Load from backup if exists
39
+ self._load_from_backup()
40
+
41
+ # Start backup thread
42
+ self._start_backup_thread()
43
+
44
+ def _load_from_backup(self):
45
+ """Load data from backup file"""
46
+ if self.backup_path.exists():
47
+ try:
48
+ with open(self.backup_path, 'r') as f:
49
+ backup_data = json.load(f)
50
+
51
+ with self.lock:
52
+ self.data = backup_data
53
+ except:
54
+ pass
55
+
56
+ def _save_to_backup(self):
57
+ """Save data to backup file"""
58
+ with self.lock:
59
+ if not self.data:
60
+ return
61
+
62
+ try:
63
+ with open(self.backup_path, 'w') as f:
64
+ json.dump(self.data, f)
65
+ except:
66
+ pass
67
+
68
+ def _start_backup_thread(self):
69
+ """Start background backup thread"""
70
+ def backup_task():
71
+ while True:
72
+ time.sleep(self.backup_interval)
73
+ self._save_to_backup()
74
+
75
+ backup_thread = threading.Thread(target=backup_task, daemon=True)
76
+ backup_thread.start()
77
+
78
+ def add(self, item, category=None):
79
+ """Add item to buffer"""
80
+ with self.lock:
81
+ # Add metadata
82
+ if isinstance(item, dict):
83
+ if "timestamp" not in item:
84
+ item["timestamp"] = datetime.now().isoformat()
85
+ if category:
86
+ item["category"] = category
87
+
88
+ # Add to buffer
89
+ self.data.append(item)
90
+
91
+ # Trim if exceeds max size
92
+ if len(self.data) > self.max_size:
93
+ self.data = self.data[-self.max_size:]
94
+
95
+ # Save to backup if buffer gets large
96
+ if len(self.data) % 100 == 0:
97
+ self._save_to_backup()
98
+
99
+ return len(self.data)
100
+
101
+ def get(self, count=None, category=None):
102
+ """Get items from buffer with optional filtering"""
103
+ with self.lock:
104
+ if category:
105
+ filtered_data = [item for item in self.data if item.get("category") == category]
106
+ else:
107
+ filtered_data = self.data.copy()
108
+
109
+ if count:
110
+ return filtered_data[:count]
111
+ else:
112
+ return filtered_data
113
+
114
+ def clear(self, category=None):
115
+ """Clear buffer, optionally by category"""
116
+ with self.lock:
117
+ if category:
118
+ self.data = [item for item in self.data if item.get("category") != category]
119
+ else:
120
+ self.data = []
121
+
122
+ self._save_to_backup()
123
+ return len(self.data)
124
+
125
+ def size(self, category=None):
126
+ """Get buffer size, optionally by category"""
127
+ with self.lock:
128
+ if category:
129
+ return len([item for item in self.data if item.get("category") == category])
130
+ else:
131
+ return len(self.data)
132
+
133
# Module-level singleton: all importers share this one Buffer instance
# (constructed eagerly at import time; __new__ guards against duplicates).
buffer = Buffer()