vnai 2.1.7__py3-none-any.whl → 2.1.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +240 -178
- vnai/beam/__init__.py +4 -2
- vnai/beam/metrics.py +218 -167
- vnai/beam/pulse.py +108 -79
- vnai/beam/quota.py +486 -333
- vnai/flow/__init__.py +5 -2
- vnai/flow/queue.py +133 -100
- vnai/flow/relay.py +447 -356
- vnai/scope/__init__.py +7 -4
- vnai/scope/profile.py +765 -579
- vnai/scope/promo.py +375 -278
- vnai/scope/state.py +222 -155
- {vnai-2.1.7.dist-info → vnai-2.1.8.dist-info}/METADATA +20 -20
- vnai-2.1.8.dist-info/RECORD +16 -0
- vnai-2.1.7.dist-info/RECORD +0 -16
- {vnai-2.1.7.dist-info → vnai-2.1.8.dist-info}/WHEEL +0 -0
- {vnai-2.1.7.dist-info → vnai-2.1.8.dist-info}/top_level.txt +0 -0
vnai/flow/relay.py
CHANGED
@@ -1,356 +1,447 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
import
|
4
|
-
import
|
5
|
-
import
|
6
|
-
|
7
|
-
|
8
|
-
from
|
9
|
-
|
10
|
-
|
11
|
-
|
12
|
-
|
13
|
-
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
"
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
self.
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
self.
|
38
|
-
self.
|
39
|
-
self.
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
self.
|
46
|
-
self.
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
import
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
def
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
self.
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
if
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
|
179
|
-
if
|
180
|
-
|
181
|
-
|
182
|
-
|
183
|
-
|
184
|
-
|
185
|
-
self.
|
186
|
-
|
187
|
-
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
"
|
196
|
-
|
197
|
-
|
198
|
-
|
199
|
-
|
200
|
-
|
201
|
-
|
202
|
-
|
203
|
-
|
204
|
-
|
205
|
-
|
206
|
-
|
207
|
-
|
208
|
-
|
209
|
-
|
210
|
-
|
211
|
-
|
212
|
-
|
213
|
-
|
214
|
-
|
215
|
-
|
216
|
-
|
217
|
-
|
218
|
-
|
219
|
-
|
220
|
-
|
221
|
-
|
222
|
-
|
223
|
-
|
224
|
-
|
225
|
-
|
226
|
-
|
227
|
-
|
228
|
-
|
229
|
-
|
230
|
-
|
231
|
-
|
232
|
-
|
233
|
-
|
234
|
-
|
235
|
-
|
236
|
-
|
237
|
-
|
238
|
-
|
239
|
-
|
240
|
-
|
241
|
-
|
242
|
-
|
243
|
-
"
|
244
|
-
|
245
|
-
|
246
|
-
|
247
|
-
"
|
248
|
-
|
249
|
-
"
|
250
|
-
|
251
|
-
|
252
|
-
|
253
|
-
|
254
|
-
|
255
|
-
|
256
|
-
|
257
|
-
|
258
|
-
|
259
|
-
|
260
|
-
|
261
|
-
|
262
|
-
|
263
|
-
"
|
264
|
-
|
265
|
-
|
266
|
-
"
|
267
|
-
|
268
|
-
|
269
|
-
"
|
270
|
-
|
271
|
-
|
272
|
-
|
273
|
-
|
274
|
-
|
275
|
-
|
276
|
-
|
277
|
-
|
278
|
-
|
279
|
-
|
280
|
-
|
281
|
-
|
282
|
-
|
283
|
-
|
284
|
-
self.
|
285
|
-
|
286
|
-
|
287
|
-
|
288
|
-
|
289
|
-
|
290
|
-
|
291
|
-
|
292
|
-
|
293
|
-
|
294
|
-
|
295
|
-
|
296
|
-
|
297
|
-
|
298
|
-
|
299
|
-
|
300
|
-
|
301
|
-
|
302
|
-
|
303
|
-
"
|
304
|
-
"
|
305
|
-
|
306
|
-
|
307
|
-
|
308
|
-
|
309
|
-
|
310
|
-
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
316
|
-
|
317
|
-
|
318
|
-
|
319
|
-
|
320
|
-
|
321
|
-
|
322
|
-
|
323
|
-
|
324
|
-
|
325
|
-
|
326
|
-
|
327
|
-
|
328
|
-
|
329
|
-
|
330
|
-
|
331
|
-
|
332
|
-
|
333
|
-
|
334
|
-
|
335
|
-
|
336
|
-
|
337
|
-
|
338
|
-
|
339
|
-
|
340
|
-
|
341
|
-
|
342
|
-
"
|
343
|
-
"
|
344
|
-
"
|
345
|
-
"
|
346
|
-
|
347
|
-
|
348
|
-
|
349
|
-
|
350
|
-
|
351
|
-
|
352
|
-
|
353
|
-
|
354
|
-
|
355
|
-
|
356
|
-
|
1
|
+
# vnai/flow/relay.py
|
2
|
+
|
3
|
+
import time
|
4
|
+
import threading
|
5
|
+
import json
|
6
|
+
import random
|
7
|
+
import requests
|
8
|
+
from datetime import datetime
|
9
|
+
from pathlib import Path
|
10
|
+
from typing import Dict, List, Any, Optional
|
11
|
+
|
12
|
+
class Conduit:
    """Handles system telemetry flow.

    Buffers function-call, API-request and rate-limit records and flushes
    them to the analytics endpoint when a size, event or time-weighted
    random trigger fires.  Implemented as a process-wide singleton.
    """

    _instance = None
    _lock = threading.Lock()

    def __new__(cls, buffer_size=50, sync_interval=300):
        # Singleton: create once under the class lock, then always reuse.
        with cls._lock:
            if cls._instance is None:
                cls._instance = super(Conduit, cls).__new__(cls)
                cls._instance._initialize(buffer_size, sync_interval)
            return cls._instance

    def _initialize(self, buffer_size, sync_interval):
        """Initialize buffers, storage paths, machine id and the sync thread."""
        self.buffer_size = buffer_size
        self.sync_interval = sync_interval

        # Separate buffers for different data types
        self.buffer = {
            "function_calls": [],
            "api_requests": [],
            "rate_limits": []
        }

        self.lock = threading.Lock()
        self.last_sync_time = time.time()
        self.sync_count = 0
        self.failed_queue = []

        # Home directory setup (~/.vnstock/data)
        self.home_dir = Path.home()
        self.project_dir = self.home_dir / ".vnstock"
        self.project_dir.mkdir(exist_ok=True)
        self.data_dir = self.project_dir / 'data'
        self.data_dir.mkdir(exist_ok=True)
        self.config_path = self.data_dir / "relay_config.json"

        # Get machine identifier from system profile; fall back to a local hash.
        # (Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt pass through.)
        try:
            from vnai.scope.profile import inspector
            self.machine_id = inspector.fingerprint()
        except Exception:
            self.machine_id = self._generate_fallback_id()

        # Load config if exists
        self._load_config()

        # Start periodic sync
        self._start_periodic_sync()

    def _generate_fallback_id(self) -> str:
        """Generate a fallback machine identifier if profile is unavailable."""
        try:
            import hashlib
            import platform

            # Hash machine-specific information into a stable identifier.
            system_info = platform.node() + platform.platform() + platform.processor()
            return hashlib.md5(system_info.encode()).hexdigest()
        except Exception:
            import uuid
            return str(uuid.uuid4())

    def _load_config(self):
        """Load persisted configuration from file, best-effort."""
        if self.config_path.exists():
            try:
                with open(self.config_path, 'r') as f:
                    config = json.load(f)

                if 'buffer_size' in config:
                    self.buffer_size = config['buffer_size']
                if 'sync_interval' in config:
                    self.sync_interval = config['sync_interval']
                if 'last_sync_time' in config:
                    self.last_sync_time = config['last_sync_time']
                if 'sync_count' in config:
                    self.sync_count = config['sync_count']
            except Exception:
                # Corrupt/unreadable config is non-fatal; keep defaults.
                pass

    def _save_config(self):
        """Persist configuration to file, best-effort."""
        config = {
            'buffer_size': self.buffer_size,
            'sync_interval': self.sync_interval,
            'last_sync_time': self.last_sync_time,
            'sync_count': self.sync_count
        }

        try:
            with open(self.config_path, 'w') as f:
                json.dump(config, f)
        except Exception:
            # Telemetry config persistence must never crash the host app.
            pass

    def _start_periodic_sync(self):
        """Start the daemon thread that dispatches on a fixed interval."""
        def periodic_sync():
            while True:
                time.sleep(self.sync_interval)
                self.dispatch("periodic")

        sync_thread = threading.Thread(target=periodic_sync, daemon=True)
        sync_thread.start()

    def add_function_call(self, record):
        """Add function call record."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["function_calls"].append(record)
            self._check_triggers("function_calls")

    def add_api_request(self, record):
        """Add API request record."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["api_requests"].append(record)
            self._check_triggers("api_requests")

    def add_rate_limit(self, record):
        """Add rate limit record."""
        # Ensure record is a dictionary
        if not isinstance(record, dict):
            record = {"value": str(record)}

        with self.lock:
            self.buffer["rate_limits"].append(record)
            self._check_triggers("rate_limits")

    def _check_triggers(self, record_type: str):
        """Check if any sync trigger is met; if so, dispatch on a worker thread."""
        current_time = time.time()
        should_trigger = False
        trigger_reason = None

        # Get total buffer size
        total_records = sum(len(buffer) for buffer in self.buffer.values())

        # SIZE TRIGGER: Buffer size threshold reached
        if total_records >= self.buffer_size:
            should_trigger = True
            trigger_reason = "buffer_full"

        # EVENT TRIGGER: Critical events (errors, rate limit warnings)
        elif record_type == "rate_limits" and self.buffer["rate_limits"] and \
                any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = "rate_limit_exceeded"
        elif record_type == "function_calls" and self.buffer["function_calls"] and \
                any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
            should_trigger = True
            trigger_reason = "function_error"

        # TIME-WEIGHTED RANDOM TRIGGER: More likely as time since last sync increases
        else:
            time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
            if random.random() < 0.05 * time_factor:  # 0-5% chance based on time
                should_trigger = True
                trigger_reason = "random_time_weighted"

        if should_trigger:
            threading.Thread(
                target=self.dispatch,
                args=(trigger_reason,),
                daemon=True
            ).start()

    def queue(self, package, priority=None):
        """Queue a data package, tagging every payload with a 'segment' field.

        Returns True when the package was routed into a buffer, False for an
        empty package.  ``priority="high"`` forces an immediate dispatch.
        """
        # --- Auto add 'segment' field to every payload ---
        try:
            from vnai.scope.promo import ContentManager
            is_paid = ContentManager().is_paid_user
            segment_val = "paid" if is_paid else "free"
        except Exception:
            segment_val = "free"

        def ensure_segment(d):
            # Return a copy of d with 'segment' set; non-dicts pass through.
            if not isinstance(d, dict):
                return d
            d = dict(d)  # copy so the caller's dict is untouched
            if "segment" not in d:
                d["segment"] = segment_val
            return d

        # Add segment to package if not present.
        # (Removed dead code that base64-decoded an API key into an unused
        #  local here — the key is only needed, and decoded, in _send_data.)
        if isinstance(package, dict) and "segment" not in package:
            package["segment"] = segment_val
        # Add segment to data if exists and is dict
        if isinstance(package, dict) and isinstance(package.get("data"), dict):
            if "segment" not in package["data"]:
                package["data"]["segment"] = segment_val
        # --- End auto segment ---

        if not package:
            return False

        # Handle non-dictionary packages
        if not isinstance(package, dict):
            self.add_function_call(ensure_segment({"message": str(package)}))
            return True

        # Add timestamp if not present
        if "timestamp" not in package:
            package["timestamp"] = datetime.now().isoformat()

        # Route based on package type
        if "type" in package:
            package_type = package["type"]
            data = package.get("data", {})

            # Remove system info if present to avoid duplication
            if isinstance(data, dict) and "system" in data:
                # Keep machine_id for reference but drop the whole system blob
                machine_id = data["system"].get("machine_id")
                data.pop("system")
                if machine_id:
                    data["machine_id"] = machine_id
            if package_type == "function":
                self.add_function_call(ensure_segment(data))
            elif package_type == "api_request":
                self.add_api_request(ensure_segment(data))
            elif package_type == "rate_limit":
                self.add_rate_limit(ensure_segment(data))
            elif package_type == "system_info":
                # For system info, record a trimmed special function call
                self.add_function_call({
                    "type": "system_info",
                    "commercial": data.get("commercial"),
                    "packages": data.get("packages"),
                    "timestamp": package.get("timestamp")
                })
            elif package_type == "metrics":
                # Handle metrics package with multiple categories
                metrics_data = data
                for metric_type, metrics_list in metrics_data.items():
                    if isinstance(metrics_list, list):
                        if metric_type == "function":
                            for item in metrics_list:
                                self.add_function_call(ensure_segment(item))
                        elif metric_type == "rate_limit":
                            for item in metrics_list:
                                self.add_rate_limit(ensure_segment(item))
                        elif metric_type == "request":
                            for item in metrics_list:
                                self.add_api_request(ensure_segment(item))
            else:
                # Default to function calls
                if isinstance(data, dict) and data is not package:
                    self.add_function_call(ensure_segment(data))
                else:
                    self.add_function_call(ensure_segment(package))
        else:
            # No type specified, default to function call
            self.add_function_call(ensure_segment(package))

        # Handle high priority items
        if priority == "high":
            self.dispatch("high_priority")

        return True

    def _send_data(self, payload):
        """Send analytics data to the configured endpoint with required headers."""
        import base64
        # API key is base64-encoded for obfuscation only.
        api_key = base64.b64decode("MXlJOEtnYXJudFFyMHB0cmlzZUhoYjRrZG9ta2VueU5JOFZQaXlrNWFvVQ==").decode()
        url = "https://hq.vnstocks.com/analytics"
        headers = {
            "x-api-key": api_key,
            "Content-Type": "application/json"
        }
        try:
            response = requests.post(url, json=payload, headers=headers, timeout=5)
            return response.status_code == 200
        except Exception:
            return False

    def retry_failed(self):
        """Resend previously failed payloads.

        Drains the failed queue, re-sends each payload and re-queues the ones
        that fail again.  Returns the number successfully delivered.
        """
        with self.lock:
            to_retry = self.failed_queue.copy()
            self.failed_queue = []

        success_count = 0
        for payload in to_retry:
            if self._send_data(payload):
                success_count += 1
            else:
                with self.lock:
                    self.failed_queue.append(payload)

        return success_count

    def dispatch(self, reason="manual"):
        """Send queued data; returns False on empty buffers, else retry count."""
        with self.lock:
            # Check if all buffers are empty
            if all(len(records) == 0 for records in self.buffer.values()):
                return False

            # Create a copy of the buffer for sending
            data_to_send = {
                "function_calls": self.buffer["function_calls"].copy(),
                "api_requests": self.buffer["api_requests"].copy(),
                "rate_limits": self.buffer["rate_limits"].copy()
            }

            # Clear buffer
            self.buffer = {
                "function_calls": [],
                "api_requests": [],
                "rate_limits": []
            }

            # Update sync time and count
            self.last_sync_time = time.time()
            self.sync_count += 1
            self._save_config()

        # Get environment information ONCE
        try:
            from vnai.scope.profile import inspector
            environment_info = inspector.examine()
            machine_id = environment_info.get("machine_id", self.machine_id)
        except Exception:
            # Fallback if environment info isn't available
            environment_info = {"machine_id": self.machine_id}
            machine_id = self.machine_id

        # Create payload with environment info only in metadata
        payload = {
            "analytics_data": data_to_send,
            "metadata": {
                "timestamp": datetime.now().isoformat(),
                "machine_id": machine_id,
                "sync_count": self.sync_count,
                "trigger_reason": reason,
                "environment": environment_info,
                "data_counts": {
                    "function_calls": len(data_to_send["function_calls"]),
                    "api_requests": len(data_to_send["api_requests"]),
                    "rate_limits": len(data_to_send["rate_limits"])
                }
            }
        }

        # Send data; on failure park it in the bounded failed queue
        success = self._send_data(payload)

        if not success:
            with self.lock:
                self.failed_queue.append(payload)
                if len(self.failed_queue) > 10:
                    self.failed_queue = self.failed_queue[-10:]

        # Flush the failed queue (including, on failure, the payload above).
        return self.retry_failed()
379
|
+
# Create singleton instance
|
380
|
+
# Module-level singleton shared by the bridge functions below.
conduit = Conduit()
381
|
+
|
382
|
+
# Exposed functions that match sync.py naming pattern
|
383
|
+
def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
    """Record one function invocation (bridge to ``conduit.add_function_call``)."""
    record = {
        "function": function_name,
        "source": source,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "success": success,
    }

    if error:
        record["error"] = error

    if args:
        if isinstance(args, dict):
            # Keep plain scalars as-is; everything else is reduced to its type name.
            record["args"] = {
                key: value if isinstance(value, (str, int, float, bool)) else str(type(value))
                for key, value in args.items()
            }
        else:
            record["args"] = {"value": str(args)}

    conduit.add_function_call(record)
410
|
+
|
411
|
+
def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
    """Record one rate-limit check (bridge to ``conduit.add_rate_limit``)."""
    # Guard against division by zero when no limit is configured.
    usage_pct = (current_usage / limit_value) * 100 if limit_value > 0 else 0

    conduit.add_rate_limit({
        "source": source,
        "limit_type": limit_type,
        "limit_value": limit_value,
        "current_usage": current_usage,
        "is_exceeded": is_exceeded,
        "timestamp": datetime.now().isoformat(),
        "usage_percentage": usage_pct,
    })
424
|
+
|
425
|
+
def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
    """Record one API request (bridge to ``conduit.add_api_request``)."""
    conduit.add_api_request({
        "endpoint": endpoint,
        "source": source,
        "method": method,
        "status_code": status_code,
        "execution_time": execution_time,
        "timestamp": datetime.now().isoformat(),
        "request_size": request_size,
        "response_size": response_size,
    })
439
|
+
|
440
|
+
|
441
|
+
def sync_now():
    """Force an immediate dispatch of all buffered telemetry."""
    result = conduit.dispatch("manual")
    return result
444
|
+
|
445
|
+
def retry_failed():
    """Retry failed synchronizations.

    Drains the conduit's failed queue, re-sends each payload and re-queues
    the ones that fail again.  Returns the number successfully delivered.

    NOTE(fix): the previous implementation forwarded to
    ``conduit.retry_failed()``, a method the ``Conduit`` class does not
    define, so every call raised ``AttributeError``.  The retry logic is
    now performed here directly against the singleton's state.
    """
    with conduit.lock:
        pending = conduit.failed_queue.copy()
        conduit.failed_queue = []

    delivered = 0
    for payload in pending:
        if conduit._send_data(payload):
            delivered += 1
        else:
            with conduit.lock:
                conduit.failed_queue.append(payload)

    return delivered