vnai 2.1.8__py3-none-any.whl → 2.1.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vnai/__init__.py +37 -110
- vnai/beam/__init__.py +0 -2
- vnai/beam/metrics.py +48 -99
- vnai/beam/pulse.py +24 -53
- vnai/beam/quota.py +94 -247
- vnai/flow/__init__.py +1 -4
- vnai/flow/queue.py +17 -50
- vnai/flow/relay.py +98 -204
- vnai/scope/__init__.py +1 -4
- vnai/scope/profile.py +231 -417
- vnai/scope/promo.py +41 -123
- vnai/scope/state.py +52 -119
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/METADATA +1 -1
- vnai-2.1.9.dist-info/RECORD +16 -0
- vnai-2.1.8.dist-info/RECORD +0 -16
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/WHEEL +0 -0
- {vnai-2.1.8.dist-info → vnai-2.1.9.dist-info}/top_level.txt +0 -0
vnai/flow/relay.py
CHANGED
@@ -1,5 +1,3 @@
-# vnai/flow/relay.py
-
 import time
 import threading
 import json
@@ -10,289 +8,219 @@ from pathlib import Path
 from typing import Dict, List, Any, Optional

 class Conduit:
-    """Handles system telemetry flow"""
-
     _instance = None
     _lock = threading.Lock()
-
+
     def __new__(cls, buffer_size=50, sync_interval=300):
         with cls._lock:
             if cls._instance is None:
                 cls._instance = super(Conduit, cls).__new__(cls)
                 cls._instance._initialize(buffer_size, sync_interval)
             return cls._instance
-
+
     def _initialize(self, buffer_size, sync_interval):
-        """Initialize conduit"""
         self.buffer_size = buffer_size
         self.sync_interval = sync_interval
-
-        # Separate buffers for different data types
         self.buffer = {
-
-
-
+            "function_calls": [],
+            "api_requests": [],
+            "rate_limits": []
         }
-
         self.lock = threading.Lock()
         self.last_sync_time = time.time()
         self.sync_count = 0
         self.failed_queue = []
-
-        # Home directory setup
         self.home_dir = Path.home()
-        self.project_dir = self.home_dir / ".vnstock"
+        self.project_dir = self.home_dir /".vnstock"
         self.project_dir.mkdir(exist_ok=True)
-        self.data_dir = self.project_dir / 'data'
+        self.data_dir = self.project_dir /'data'
         self.data_dir.mkdir(exist_ok=True)
-        self.config_path = self.data_dir / "relay_config.json"
-
-        # Get machine identifier from system profile
+        self.config_path = self.data_dir /"relay_config.json"
         try:
             from vnai.scope.profile import inspector
             self.machine_id = inspector.fingerprint()
         except:
             self.machine_id = self._generate_fallback_id()
-
-        # Load config if exists
         self._load_config()
-
-        # Start periodic sync
         self._start_periodic_sync()
-
+
     def _generate_fallback_id(self) -> str:
-        """Generate a fallback machine identifier if profile is unavailable"""
         try:
             import platform
             import hashlib
             import uuid
-
-            # Try to get machine-specific information
             system_info = platform.node() + platform.platform() + platform.processor()
             return hashlib.md5(system_info.encode()).hexdigest()
         except:
             import uuid
             return str(uuid.uuid4())
-
+
     def _load_config(self):
-        """Load configuration from file"""
         if self.config_path.exists():
             try:
-                with open(self.config_path, 'r') as f:
+                with open(self.config_path,'r') as f:
                     config = json.load(f)
-
-
-                if 'buffer_size' in config:
+                if'buffer_size' in config:
                     self.buffer_size = config['buffer_size']
-                if 'sync_interval' in config:
+                if'sync_interval' in config:
                     self.sync_interval = config['sync_interval']
-                if 'last_sync_time' in config:
+                if'last_sync_time' in config:
                     self.last_sync_time = config['last_sync_time']
-                if 'sync_count' in config:
+                if'sync_count' in config:
                     self.sync_count = config['sync_count']
             except:
                 pass
-
+
     def _save_config(self):
-        """Save configuration to file"""
         config = {
-
-
-
-
-            'sync_count': self.sync_count
+            'buffer_size': self.buffer_size,
+            'sync_interval': self.sync_interval,
+            'last_sync_time': self.last_sync_time,
+            'sync_count': self.sync_count
         }
-
         try:
-            with open(self.config_path, 'w') as f:
+            with open(self.config_path,'w') as f:
                 json.dump(config, f)
         except:
             pass
-
+
     def _start_periodic_sync(self):
-        """Start periodic sync thread"""
         def periodic_sync():
             while True:
                 time.sleep(self.sync_interval)
                 self.dispatch("periodic")
-
         sync_thread = threading.Thread(target=periodic_sync, daemon=True)
         sync_thread.start()
-
+
     def add_function_call(self, record):
-        """Add function call record"""
-        # Ensure record is a dictionary
         if not isinstance(record, dict):
             record = {"value": str(record)}
-
         with self.lock:
             self.buffer["function_calls"].append(record)
             self._check_triggers("function_calls")
-
+
     def add_api_request(self, record):
-        """Add API request record"""
-        # Ensure record is a dictionary
         if not isinstance(record, dict):
             record = {"value": str(record)}
-
         with self.lock:
             self.buffer["api_requests"].append(record)
             self._check_triggers("api_requests")
-
+
     def add_rate_limit(self, record):
-        """Add rate limit record"""
-        # Ensure record is a dictionary
         if not isinstance(record, dict):
             record = {"value": str(record)}
-
         with self.lock:
             self.buffer["rate_limits"].append(record)
             self._check_triggers("rate_limits")
-
+
     def _check_triggers(self, record_type: str):
-        """Check if any sync triggers are met"""
         current_time = time.time()
         should_trigger = False
         trigger_reason = None
-
-        # Get total buffer size
         total_records = sum(len(buffer) for buffer in self.buffer.values())
-
-        # SIZE TRIGGER: Buffer size threshold reached
        if total_records >= self.buffer_size:
             should_trigger = True
-            trigger_reason = "buffer_full"
-
-        # EVENT TRIGGER: Critical events (errors, rate limit warnings)
-        elif record_type == "rate_limits" and self.buffer["rate_limits"] and \
-            any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
+            trigger_reason ="buffer_full"
+        elif record_type =="rate_limits" and self.buffer["rate_limits"] and any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
             should_trigger = True
-            trigger_reason = "rate_limit_exceeded"
-        elif record_type == "function_calls" and self.buffer["function_calls"] and \
-            any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
+            trigger_reason ="rate_limit_exceeded"
+        elif record_type =="function_calls" and self.buffer["function_calls"] and any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
             should_trigger = True
-            trigger_reason = "function_error"
-
-        # TIME-WEIGHTED RANDOM TRIGGER: More likely as time since last sync increases
+            trigger_reason ="function_error"
         else:
             time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
-            if random.random() < 0.05 * time_factor:
+            if random.random() < 0.05 * time_factor:
                 should_trigger = True
-                trigger_reason = "random_time_weighted"
-
+                trigger_reason ="random_time_weighted"
         if should_trigger:
             threading.Thread(
                 target=self.dispatch,
                 args=(trigger_reason,),
                 daemon=True
             ).start()
-
+
     def queue(self, package, priority=None):
-        # --- Auto add 'segment' field to every payload ---
         try:
             from vnai.scope.promo import ContentManager
             is_paid = ContentManager().is_paid_user
-            segment_val = "paid" if is_paid else "free"
+            segment_val ="paid" if is_paid else"free"
         except Exception:
-            segment_val = "free"
+            segment_val ="free"

         def ensure_segment(d):
             if not isinstance(d, dict):
                 return d
-            d = dict(d)
-            if "segment" not in d:
+            d = dict(d)
+            if"segment" not in d:
                 d["segment"] = segment_val
             return d
-
-        if isinstance(package, dict) and "segment" not in package:
-            # API key is base64-encoded for obfuscation
+        if isinstance(package, dict) and"segment" not in package:
             import base64
             api_key = base64.b64decode("MXlJOEtnYXJudFFyMHB0cmlzZUhoYjRrZG9ta2VueU5JOFZQaXlrNWFvVQ==").decode()
             package["segment"] = segment_val
-        # Add segment to data if exists and is dict
         if isinstance(package, dict) and isinstance(package.get("data"), dict):
-            if "segment" not in package["data"]:
+            if"segment" not in package["data"]:
                 package["data"]["segment"] = segment_val
-        # --- End auto segment ---
-
         """Queue data package"""
         if not package:
             return False
-
-        # Handle non-dictionary packages
         if not isinstance(package, dict):
             self.add_function_call(ensure_segment({"message": str(package)}))
             return True
-
-        # Add timestamp if not present
-        if "timestamp" not in package:
+        if"timestamp" not in package:
             package["timestamp"] = datetime.now().isoformat()
-
-        # Route based on package type
-        if "type" in package:
+        if"type" in package:
             package_type = package["type"]
             data = package.get("data", {})
-
-            # Remove system info if present to avoid duplication
-            if isinstance(data, dict) and "system" in data:
-                # Get machine_id for reference but don't duplicate the whole system info
+            if isinstance(data, dict) and"system" in data:
                 machine_id = data["system"].get("machine_id")
                 data.pop("system")
                 if machine_id:
                     data["machine_id"] = machine_id
-            if package_type == "function":
+            if package_type =="function":
                 self.add_function_call(ensure_segment(data))
-            elif package_type == "api_request":
+            elif package_type =="api_request":
                 self.add_api_request(ensure_segment(data))
-            elif package_type == "rate_limit":
+            elif package_type =="rate_limit":
                 self.add_rate_limit(ensure_segment(data))
-            elif package_type == "system_info":
-                # For system info, we'll add it as a special function call
-                # but remove duplicated data
+            elif package_type =="system_info":
                 self.add_function_call({
-
-
-
-
+                    "type":"system_info",
+                    "commercial": data.get("commercial"),
+                    "packages": data.get("packages"),
+                    "timestamp": package.get("timestamp")
                 })
-            elif package_type == "metrics":
-                # Handle metrics package with multiple categories
+            elif package_type =="metrics":
                 metrics_data = data
                 for metric_type, metrics_list in metrics_data.items():
                     if isinstance(metrics_list, list):
-                        if metric_type == "function":
+                        if metric_type =="function":
                             for item in metrics_list:
                                 self.add_function_call(ensure_segment(item))
-                        elif metric_type == "rate_limit":
+                        elif metric_type =="rate_limit":
                             for item in metrics_list:
                                 self.add_rate_limit(ensure_segment(item))
-                        elif metric_type == "request":
+                        elif metric_type =="request":
                             for item in metrics_list:
                                 self.add_api_request(ensure_segment(item))
             else:
-                # Default to function calls
                 if isinstance(data, dict) and data is not package:
                     self.add_function_call(ensure_segment(data))
                 else:
                     self.add_function_call(ensure_segment(package))
         else:
-            # No type specified, default to function call
             self.add_function_call(ensure_segment(package))
-
-        # Handle high priority items
-        if priority == "high":
+        if priority =="high":
             self.dispatch("high_priority")
-
         return True
-
+
     def _send_data(self, payload):
-        """Send analytics data to the configured endpoint with required headers."""
         import base64
         api_key = base64.b64decode("MXlJOEtnYXJudFFyMHB0cmlzZUhoYjRrZG9ta2VueU5JOFZQaXlrNWFvVQ==").decode()
-        url = "https://hq.vnstocks.com/analytics"
+        url ="https://hq.vnstocks.com/analytics"
         headers = {
-
-
+            "x-api-key": api_key,
+            "Content-Type":"application/json"
         }
         try:
             response = requests.post(url, json=payload, headers=headers, timeout=5)
@@ -301,62 +229,45 @@ class Conduit:
             return False

     def dispatch(self, reason="manual"):
-        """Send queued data"""
-        # (webhook_url logic removed, always proceed)
         with self.lock:
-            # Check if all buffers are empty
             if all(len(records) == 0 for records in self.buffer.values()):
                 return False
-
-            # Create a copy of the buffer for sending
             data_to_send = {
-
-
-
+                "function_calls": self.buffer["function_calls"].copy(),
+                "api_requests": self.buffer["api_requests"].copy(),
+                "rate_limits": self.buffer["rate_limits"].copy()
             }
-
-            # Clear buffer
             self.buffer = {
-
-
-
+                "function_calls": [],
+                "api_requests": [],
+                "rate_limits": []
             }
-
-            # Update sync time and count
             self.last_sync_time = time.time()
             self.sync_count += 1
             self._save_config()
-
-        # Get environment information ONCE
         try:
             from vnai.scope.profile import inspector
             environment_info = inspector.examine()
             machine_id = environment_info.get("machine_id", self.machine_id)
         except:
-            # Fallback if environment info isn't available
             environment_info = {"machine_id": self.machine_id}
             machine_id = self.machine_id
-
-        # Create payload with environment info only in metadata
         payload = {
-
-
-
-
-
-
-
-
-
-
-
+            "analytics_data": data_to_send,
+            "metadata": {
+                "timestamp": datetime.now().isoformat(),
+                "machine_id": machine_id,
+                "sync_count": self.sync_count,
+                "trigger_reason": reason,
+                "environment": environment_info,
+                "data_counts": {
+                    "function_calls": len(data_to_send["function_calls"]),
+                    "api_requests": len(data_to_send["api_requests"]),
+                    "rate_limits": len(data_to_send["rate_limits"])
                 }
             }
         }
-
-        # Send data
         success = self._send_data(payload)
-
         if not success:
             with self.lock:
                 self.failed_queue.append(payload)
@@ -365,7 +276,6 @@ class Conduit:
         with self.lock:
             to_retry = self.failed_queue.copy()
             self.failed_queue = []
-
         success_count = 0
         for payload in to_retry:
             if self._send_data(payload):
@@ -373,28 +283,20 @@ class Conduit:
             else:
                 with self.lock:
                     self.failed_queue.append(payload)
-
         return success_count
-
-
-# Create singleton instance
 conduit = Conduit()

-# Exposed functions that match sync.py naming pattern
 def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
-    """Track function call (bridge to add_function_call)"""
     record = {
-
-
-
-
-
+        "function": function_name,
+        "source": source,
+        "execution_time": execution_time,
+        "timestamp": datetime.now().isoformat(),
+        "success": success
     }
-
     if error:
         record["error"] = error
-
     if args:
-        # Sanitize arguments
         sanitized_args = {}
         if isinstance(args, dict):
             for key, value in args.items():
@@ -405,43 +307,35 @@ def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
         else:
             sanitized_args = {"value": str(args)}
         record["args"] = sanitized_args
-
     conduit.add_function_call(record)

 def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
-    """Track rate limit checks (bridge to add_rate_limit)"""
     record = {
-
-
-
-
-
-
-
+        "source": source,
+        "limit_type": limit_type,
+        "limit_value": limit_value,
+        "current_usage": current_usage,
+        "is_exceeded": is_exceeded,
+        "timestamp": datetime.now().isoformat(),
+        "usage_percentage": (current_usage / limit_value) * 100 if limit_value > 0 else 0
     }
-
     conduit.add_rate_limit(record)

 def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
-    """Track API requests (bridge to add_api_request)"""
     record = {
-
-
-
-
-
-
-
-
+        "endpoint": endpoint,
+        "source": source,
+        "method": method,
+        "status_code": status_code,
+        "execution_time": execution_time,
+        "timestamp": datetime.now().isoformat(),
+        "request_size": request_size,
+        "response_size": response_size
     }
-
     conduit.add_api_request(record)

-
 def sync_now():
-    """Manually trigger synchronization"""
     return conduit.dispatch("manual")

 def retry_failed():
-
-    return conduit.retry_failed()
+    return conduit.retry_failed()
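For orientation, here is a minimal sketch of how the module-level bridge functions kept at the bottom of relay.py in 2.1.9 are called. The import path, function signatures, trigger behavior, and dispatch semantics are taken from the diff above; the argument values, function name, and source label are hypothetical.

    # Hypothetical caller of the vnai.flow.relay bridge functions shown above.
    # Importing the module constructs the Conduit singleton and starts its
    # daemon sync thread (sync_interval defaults to 300 seconds).
    from vnai.flow.relay import track_function_call, track_rate_limit, sync_now

    # Buffered as a "function_calls" record; a record with success=False is
    # one of the events that spawns an immediate background dispatch.
    track_function_call(
        function_name="quote_history",   # hypothetical function name
        source="demo",                   # hypothetical source label
        execution_time=0.42,
        success=True,
        args={"symbol": "ACB"},
    )

    # Buffered as a "rate_limits" record; is_exceeded=True also triggers dispatch.
    track_rate_limit(
        source="demo",
        limit_type="per_minute",
        limit_value=60,
        current_usage=61,
        is_exceeded=True,
    )

    # Manual flush: dispatch("manual") copies and clears the buffers, wraps them
    # with machine_id and environment metadata, and POSTs the payload to the
    # endpoint hard-coded in _send_data(); it returns False if the buffers are empty.
    sync_now()
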
vnai/scope/__init__.py
CHANGED
@@ -1,7 +1,4 @@
-# vnai/scope/__init__.py
-# Environment detection and state tracking
-
 from vnai.scope.profile import inspector
 from vnai.scope.state import tracker, record
 from vnai.scope.promo import manager as content_manager
-from vnai.scope.promo import present as present_content
+from vnai.scope.promo import present as present_content
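The trimmed vnai/scope/__init__.py keeps the same re-exports, so the public import surface is unchanged. A minimal sketch of that surface follows; only inspector.fingerprint() is exercised, since the relay.py diff above shows it being used for the machine identifier.

    # Names re-exported by vnai/scope/__init__.py per the diff above.
    from vnai.scope import inspector, tracker, record, content_manager, present_content

    # fingerprint() supplies the machine_id that vnai/flow/relay.py attaches to payloads.
    machine_id = inspector.fingerprint()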