vnai 2.0.2__py3-none-any.whl → 2.0.4__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
vnai/flow/relay.py CHANGED
@@ -1,152 +1,442 @@
1
- _T='execution_time'
2
- _S='manual'
3
- _R='success'
4
- _Q='is_exceeded'
5
- _P='source'
6
- _O='function'
7
- _N='last_sync_time'
8
- _M='sync_interval'
9
- _L='buffer_size'
10
- _K='webhook_url'
11
- _J='value'
12
- _I='sync_count'
13
- _H='machine_id'
14
- _G=False
15
- _F=None
16
- _E='timestamp'
17
- _D='api_requests'
18
- _C='rate_limits'
19
- _B='function_calls'
20
- _A=True
21
- import time,threading,json,random,requests
1
+ # vnai/flow/relay.py
2
+ # Data transmission system (formerly sync)
3
+
4
+ import time
5
+ import threading
6
+ import json
7
+ import random
8
+ import requests
22
9
  from datetime import datetime
23
10
  from pathlib import Path
24
- from typing import Dict,List,Any,Optional
11
+ from typing import Dict, List, Any, Optional
12
+
25
13
  class Conduit:
26
- _instance=_F;_lock=threading.Lock()
27
- def __new__(A,webhook_url=_F,buffer_size=50,sync_interval=300):
28
- with A._lock:
29
- if A._instance is _F:A._instance=super(Conduit,A).__new__(A);A._instance._initialize(webhook_url,buffer_size,sync_interval)
30
- return A._instance
31
- def _initialize(A,webhook_url,buffer_size,sync_interval):
32
- A.webhook_url=webhook_url;A.buffer_size=buffer_size;A.sync_interval=sync_interval;A.buffer={_B:[],_D:[],_C:[]};A.lock=threading.Lock();A.last_sync_time=time.time();A.sync_count=0;A.failed_queue=[];A.home_dir=Path.home();A.project_dir=A.home_dir/'.vnstock';A.project_dir.mkdir(exist_ok=_A);A.data_dir=A.project_dir/'data';A.data_dir.mkdir(exist_ok=_A);A.config_path=A.data_dir/'relay_config.json'
33
- try:from vnai.scope.profile import inspector as B;A.machine_id=B.fingerprint()
34
- except:A.machine_id=A._generate_fallback_id()
35
- A._load_config();A._start_periodic_sync()
36
- def _generate_fallback_id(D)->str:
37
- try:import platform as A,hashlib as B,uuid;C=A.node()+A.platform()+A.processor();return B.md5(C.encode()).hexdigest()
38
- except:import uuid;return str(uuid.uuid4())
39
- def _load_config(B):
40
- if B.config_path.exists():
41
- try:
42
- with open(B.config_path,'r')as C:A=json.load(C)
43
- if not B.webhook_url and _K in A:B.webhook_url=A[_K]
44
- if _L in A:B.buffer_size=A[_L]
45
- if _M in A:B.sync_interval=A[_M]
46
- if _N in A:B.last_sync_time=A[_N]
47
- if _I in A:B.sync_count=A[_I]
48
- except:pass
49
- def _save_config(A):
50
- B={_K:A.webhook_url,_L:A.buffer_size,_M:A.sync_interval,_N:A.last_sync_time,_I:A.sync_count}
51
- try:
52
- with open(A.config_path,'w')as C:json.dump(B,C)
53
- except:pass
54
- def _start_periodic_sync(A):
55
- def B():
56
- while _A:time.sleep(A.sync_interval);A.dispatch('periodic')
57
- C=threading.Thread(target=B,daemon=_A);C.start()
58
- def add_function_call(B,record):
59
- A=record
60
- if not isinstance(A,dict):A={_J:str(A)}
61
- with B.lock:B.buffer[_B].append(A);B._check_triggers(_B)
62
- def add_api_request(B,record):
63
- A=record
64
- if not isinstance(A,dict):A={_J:str(A)}
65
- with B.lock:B.buffer[_D].append(A);B._check_triggers(_D)
66
- def add_rate_limit(B,record):
67
- A=record
68
- if not isinstance(A,dict):A={_J:str(A)}
69
- with B.lock:B.buffer[_C].append(A);B._check_triggers(_C)
70
- def _check_triggers(A,record_type:str):
71
- D=record_type;E=time.time();B=_G;C=_F;F=sum(len(A)for A in A.buffer.values())
72
- if F>=A.buffer_size:B=_A;C='buffer_full'
73
- elif D==_C and A.buffer[_C]and any(A.get(_Q)for A in A.buffer[_C]if isinstance(A,dict)):B=_A;C='rate_limit_exceeded'
74
- elif D==_B and A.buffer[_B]and any(not A.get(_R)for A in A.buffer[_B]if isinstance(A,dict)):B=_A;C='function_error'
75
- else:
76
- G=min(1.,(E-A.last_sync_time)/(A.sync_interval/2))
77
- if random.random()<.05*G:B=_A;C='random_time_weighted'
78
- if B:threading.Thread(target=A.dispatch,args=(C,),daemon=_A).start()
79
- def queue(B,package,priority=_F):
80
- N='packages';M='commercial';L='system_info';K='rate_limit';I='system';H='type';C=package
81
- if not C:return _G
82
- if not isinstance(C,dict):B.add_function_call({'message':str(C)});return _A
83
- if _E not in C:C[_E]=datetime.now().isoformat()
84
- if H in C:
85
- D=C[H];A=C.get('data',{})
86
- if isinstance(A,dict)and I in A:
87
- J=A[I].get(_H);A.pop(I)
88
- if J:A[_H]=J
89
- if D==_O:B.add_function_call(A)
90
- elif D=='api_request':B.add_api_request(A)
91
- elif D==K:B.add_rate_limit(A)
92
- elif D==L:B.add_function_call({H:L,M:A.get(M),N:A.get(N),_E:C.get(_E)})
93
- elif D=='metrics':
94
- O=A
95
- for(G,F)in O.items():
96
- if isinstance(F,list):
97
- if G==_O:
98
- for E in F:B.add_function_call(E)
99
- elif G==K:
100
- for E in F:B.add_rate_limit(E)
101
- elif G=='request':
102
- for E in F:B.add_api_request(E)
103
- else:B.add_function_call(A)
104
- else:B.add_function_call(C)
105
- if priority=='high':B.dispatch('high_priority')
106
- return _A
107
- def dispatch(A,reason=_S):
108
- if not A.webhook_url:return _G
109
- with A.lock:
110
- if all(len(A)==0 for A in A.buffer.values()):return _G
111
- B={_B:A.buffer[_B].copy(),_D:A.buffer[_D].copy(),_C:A.buffer[_C].copy()};A.buffer={_B:[],_D:[],_C:[]};A.last_sync_time=time.time();A.sync_count+=1;A._save_config()
112
- try:from vnai.scope.profile import inspector as G;C=G.examine();D=C.get(_H,A.machine_id)
113
- except:C={_H:A.machine_id};D=A.machine_id
114
- E={'analytics_data':B,'metadata':{_E:datetime.now().isoformat(),_H:D,_I:A.sync_count,'trigger_reason':reason,'environment':C,'data_counts':{_B:len(B[_B]),_D:len(B[_D]),_C:len(B[_C])}}};F=A._send_data(E)
115
- if not F:
116
- with A.lock:
117
- A.failed_queue.append(E)
118
- if len(A.failed_queue)>10:A.failed_queue=A.failed_queue[-10:]
119
- return F
120
- def _send_data(A,payload):
121
- if not A.webhook_url:return _G
122
- try:B=requests.post(A.webhook_url,json=payload,timeout=5);return B.status_code==200
123
- except:return _G
124
- def retry_failed(A):
125
- if not A.failed_queue:return 0
126
- with A.lock:D=A.failed_queue.copy();A.failed_queue=[]
127
- B=0
128
- for C in D:
129
- if A._send_data(C):B+=1
130
- else:
131
- with A.lock:A.failed_queue.append(C)
132
- return B
133
- def configure(A,webhook_url):
134
- with A.lock:A.webhook_url=webhook_url;A._save_config();return _A
135
- conduit=Conduit()
136
- def track_function_call(function_name,source,execution_time,success=_A,error=_F,args=_F):
137
- E=error;A=args;C={_O:function_name,_P:source,_T:execution_time,_E:datetime.now().isoformat(),_R:success}
138
- if E:C['error']=E
139
- if A:
140
- B={}
141
- if isinstance(A,dict):
142
- for(F,D)in A.items():
143
- if isinstance(D,(str,int,float,bool)):B[F]=D
144
- else:B[F]=str(type(D))
145
- else:B={_J:str(A)}
146
- C['args']=B
147
- conduit.add_function_call(C)
148
- def track_rate_limit(source,limit_type,limit_value,current_usage,is_exceeded):B=current_usage;A=limit_value;C={_P:source,'limit_type':limit_type,'limit_value':A,'current_usage':B,_Q:is_exceeded,_E:datetime.now().isoformat(),'usage_percentage':B/A*100 if A>0 else 0};conduit.add_rate_limit(C)
149
- def track_api_request(endpoint,source,method,status_code,execution_time,request_size=0,response_size=0):A={'endpoint':endpoint,_P:source,'method':method,'status_code':status_code,_T:execution_time,_E:datetime.now().isoformat(),'request_size':request_size,'response_size':response_size};conduit.add_api_request(A)
150
- def configure(webhook_url):return conduit.configure(webhook_url)
151
- def sync_now():return conduit.dispatch(_S)
152
- def retry_failed():return conduit.retry_failed()
14
+ """Handles system telemetry flow"""
15
+
16
+ _instance = None
17
+ _lock = threading.Lock()
18
+
19
+ def __new__(cls, webhook_url=None, buffer_size=50, sync_interval=300):
20
+ with cls._lock:
21
+ if cls._instance is None:
22
+ cls._instance = super(Conduit, cls).__new__(cls)
23
+ cls._instance._initialize(webhook_url, buffer_size, sync_interval)
24
+ return cls._instance
25
+
26
+ def _initialize(self, webhook_url, buffer_size, sync_interval):
27
+ """Initialize conduit"""
28
+ self.webhook_url = webhook_url
29
+ self.buffer_size = buffer_size
30
+ self.sync_interval = sync_interval
31
+
32
+ # Separate buffers for different data types
33
+ self.buffer = {
34
+ "function_calls": [],
35
+ "api_requests": [],
36
+ "rate_limits": []
37
+ }
38
+
39
+ self.lock = threading.Lock()
40
+ self.last_sync_time = time.time()
41
+ self.sync_count = 0
42
+ self.failed_queue = []
43
+
44
+ # Home directory setup
45
+ self.home_dir = Path.home()
46
+ self.project_dir = self.home_dir / ".vnstock"
47
+ self.project_dir.mkdir(exist_ok=True)
48
+ self.data_dir = self.project_dir / 'data'
49
+ self.data_dir.mkdir(exist_ok=True)
50
+ self.config_path = self.data_dir / "relay_config.json"
51
+
52
+ # Get machine identifier from system profile
53
+ try:
54
+ from vnai.scope.profile import inspector
55
+ self.machine_id = inspector.fingerprint()
56
+ except:
57
+ self.machine_id = self._generate_fallback_id()
58
+
59
+ # Load config if exists
60
+ self._load_config()
61
+
62
+ # Start periodic sync
63
+ self._start_periodic_sync()
64
+
65
+ def _generate_fallback_id(self) -> str:
66
+ """Generate a fallback machine identifier if profile is unavailable"""
67
+ try:
68
+ import platform
69
+ import hashlib
70
+ import uuid
71
+
72
+ # Try to get machine-specific information
73
+ system_info = platform.node() + platform.platform() + platform.processor()
74
+ return hashlib.md5(system_info.encode()).hexdigest()
75
+ except:
76
+ import uuid
77
+ return str(uuid.uuid4())
78
+
79
+ def _load_config(self):
80
+ """Load configuration from file"""
81
+ if self.config_path.exists():
82
+ try:
83
+ with open(self.config_path, 'r') as f:
84
+ config = json.load(f)
85
+
86
+ if not self.webhook_url and 'webhook_url' in config:
87
+ self.webhook_url = config['webhook_url']
88
+ if 'buffer_size' in config:
89
+ self.buffer_size = config['buffer_size']
90
+ if 'sync_interval' in config:
91
+ self.sync_interval = config['sync_interval']
92
+ if 'last_sync_time' in config:
93
+ self.last_sync_time = config['last_sync_time']
94
+ if 'sync_count' in config:
95
+ self.sync_count = config['sync_count']
96
+ except:
97
+ pass
98
+
99
+ def _save_config(self):
100
+ """Save configuration to file"""
101
+ config = {
102
+ 'webhook_url': self.webhook_url,
103
+ 'buffer_size': self.buffer_size,
104
+ 'sync_interval': self.sync_interval,
105
+ 'last_sync_time': self.last_sync_time,
106
+ 'sync_count': self.sync_count
107
+ }
108
+
109
+ try:
110
+ with open(self.config_path, 'w') as f:
111
+ json.dump(config, f)
112
+ except:
113
+ pass
114
+
115
+ def _start_periodic_sync(self):
116
+ """Start periodic sync thread"""
117
+ def periodic_sync():
118
+ while True:
119
+ time.sleep(self.sync_interval)
120
+ self.dispatch("periodic")
121
+
122
+ sync_thread = threading.Thread(target=periodic_sync, daemon=True)
123
+ sync_thread.start()
124
+
125
+ def add_function_call(self, record):
126
+ """Add function call record"""
127
+ # Ensure record is a dictionary
128
+ if not isinstance(record, dict):
129
+ record = {"value": str(record)}
130
+
131
+ with self.lock:
132
+ self.buffer["function_calls"].append(record)
133
+ self._check_triggers("function_calls")
134
+
135
+ def add_api_request(self, record):
136
+ """Add API request record"""
137
+ # Ensure record is a dictionary
138
+ if not isinstance(record, dict):
139
+ record = {"value": str(record)}
140
+
141
+ with self.lock:
142
+ self.buffer["api_requests"].append(record)
143
+ self._check_triggers("api_requests")
144
+
145
+ def add_rate_limit(self, record):
146
+ """Add rate limit record"""
147
+ # Ensure record is a dictionary
148
+ if not isinstance(record, dict):
149
+ record = {"value": str(record)}
150
+
151
+ with self.lock:
152
+ self.buffer["rate_limits"].append(record)
153
+ self._check_triggers("rate_limits")
154
+
155
+ def _check_triggers(self, record_type: str):
156
+ """Check if any sync triggers are met"""
157
+ current_time = time.time()
158
+ should_trigger = False
159
+ trigger_reason = None
160
+
161
+ # Get total buffer size
162
+ total_records = sum(len(buffer) for buffer in self.buffer.values())
163
+
164
+ # SIZE TRIGGER: Buffer size threshold reached
165
+ if total_records >= self.buffer_size:
166
+ should_trigger = True
167
+ trigger_reason = "buffer_full"
168
+
169
+ # EVENT TRIGGER: Critical events (errors, rate limit warnings)
170
+ elif record_type == "rate_limits" and self.buffer["rate_limits"] and \
171
+ any(item.get("is_exceeded") for item in self.buffer["rate_limits"] if isinstance(item, dict)):
172
+ should_trigger = True
173
+ trigger_reason = "rate_limit_exceeded"
174
+ elif record_type == "function_calls" and self.buffer["function_calls"] and \
175
+ any(not item.get("success") for item in self.buffer["function_calls"] if isinstance(item, dict)):
176
+ should_trigger = True
177
+ trigger_reason = "function_error"
178
+
179
+ # TIME-WEIGHTED RANDOM TRIGGER: More likely as time since last sync increases
180
+ else:
181
+ time_factor = min(1.0, (current_time - self.last_sync_time) / (self.sync_interval / 2))
182
+ if random.random() < 0.05 * time_factor: # 0-5% chance based on time
183
+ should_trigger = True
184
+ trigger_reason = "random_time_weighted"
185
+
186
+ if should_trigger:
187
+ threading.Thread(
188
+ target=self.dispatch,
189
+ args=(trigger_reason,),
190
+ daemon=True
191
+ ).start()
192
+
193
+ def queue(self, package, priority=None):
194
+ """Queue data package"""
195
+ if not package:
196
+ return False
197
+
198
+ # Handle non-dictionary packages
199
+ if not isinstance(package, dict):
200
+ self.add_function_call({"message": str(package)})
201
+ return True
202
+
203
+ # Add timestamp if not present
204
+ if "timestamp" not in package:
205
+ package["timestamp"] = datetime.now().isoformat()
206
+
207
+ # Route based on package type
208
+ if "type" in package:
209
+ package_type = package["type"]
210
+ data = package.get("data", {})
211
+
212
+ # Remove system info if present to avoid duplication
213
+ if isinstance(data, dict) and "system" in data:
214
+ # Get machine_id for reference but don't duplicate the whole system info
215
+ machine_id = data["system"].get("machine_id")
216
+ data.pop("system")
217
+ if machine_id:
218
+ data["machine_id"] = machine_id
219
+
220
+ if package_type == "function":
221
+ self.add_function_call(data)
222
+ elif package_type == "api_request":
223
+ self.add_api_request(data)
224
+ elif package_type == "rate_limit":
225
+ self.add_rate_limit(data)
226
+ elif package_type == "system_info":
227
+ # For system info, we'll add it as a special function call
228
+ # but remove duplicated data
229
+ self.add_function_call({
230
+ "type": "system_info",
231
+ "commercial": data.get("commercial"),
232
+ "packages": data.get("packages"),
233
+ "timestamp": package.get("timestamp")
234
+ })
235
+ elif package_type == "metrics":
236
+ # Handle metrics package with multiple categories
237
+ metrics_data = data
238
+ for metric_type, metrics_list in metrics_data.items():
239
+ if isinstance(metrics_list, list):
240
+ if metric_type == "function":
241
+ for item in metrics_list:
242
+ self.add_function_call(item)
243
+ elif metric_type == "rate_limit":
244
+ for item in metrics_list:
245
+ self.add_rate_limit(item)
246
+ elif metric_type == "request":
247
+ for item in metrics_list:
248
+ self.add_api_request(item)
249
+ else:
250
+ # Default to function calls
251
+ self.add_function_call(data)
252
+ else:
253
+ # No type specified, default to function call
254
+ self.add_function_call(package)
255
+
256
+ # Handle high priority items
257
+ if priority == "high":
258
+ self.dispatch("high_priority")
259
+
260
+ return True
261
+
262
+ def dispatch(self, reason="manual"):
263
+ """Send queued data"""
264
+ if not self.webhook_url:
265
+ return False
266
+
267
+ with self.lock:
268
+ # Check if all buffers are empty
269
+ if all(len(records) == 0 for records in self.buffer.values()):
270
+ return False
271
+
272
+ # Create a copy of the buffer for sending
273
+ data_to_send = {
274
+ "function_calls": self.buffer["function_calls"].copy(),
275
+ "api_requests": self.buffer["api_requests"].copy(),
276
+ "rate_limits": self.buffer["rate_limits"].copy()
277
+ }
278
+
279
+ # Clear buffer
280
+ self.buffer = {
281
+ "function_calls": [],
282
+ "api_requests": [],
283
+ "rate_limits": []
284
+ }
285
+
286
+ # Update sync time and count
287
+ self.last_sync_time = time.time()
288
+ self.sync_count += 1
289
+ self._save_config()
290
+
291
+ # Get environment information ONCE
292
+ try:
293
+ from vnai.scope.profile import inspector
294
+ environment_info = inspector.examine()
295
+ machine_id = environment_info.get("machine_id", self.machine_id)
296
+ except:
297
+ # Fallback if environment info isn't available
298
+ environment_info = {"machine_id": self.machine_id}
299
+ machine_id = self.machine_id
300
+
301
+ # Create payload with environment info only in metadata
302
+ payload = {
303
+ "analytics_data": data_to_send,
304
+ "metadata": {
305
+ "timestamp": datetime.now().isoformat(),
306
+ "machine_id": machine_id,
307
+ "sync_count": self.sync_count,
308
+ "trigger_reason": reason,
309
+ "environment": environment_info,
310
+ "data_counts": {
311
+ "function_calls": len(data_to_send["function_calls"]),
312
+ "api_requests": len(data_to_send["api_requests"]),
313
+ "rate_limits": len(data_to_send["rate_limits"])
314
+ }
315
+ }
316
+ }
317
+
318
+ # Send data
319
+ success = self._send_data(payload)
320
+
321
+ if not success:
322
+ with self.lock:
323
+ self.failed_queue.append(payload)
324
+ if len(self.failed_queue) > 10:
325
+ self.failed_queue = self.failed_queue[-10:]
326
+
327
+ return success
328
+
329
+ def _send_data(self, payload):
330
+ """Send data to webhook"""
331
+ if not self.webhook_url:
332
+ return False
333
+
334
+ try:
335
+ response = requests.post(
336
+ self.webhook_url,
337
+ json=payload,
338
+ timeout=5 # 5 second timeout
339
+ )
340
+
341
+ return response.status_code == 200
342
+ except:
343
+ return False
344
+
345
+ def retry_failed(self):
346
+ """Retry sending failed data"""
347
+ if not self.failed_queue:
348
+ return 0
349
+
350
+ with self.lock:
351
+ to_retry = self.failed_queue.copy()
352
+ self.failed_queue = []
353
+
354
+ success_count = 0
355
+ for payload in to_retry:
356
+ if self._send_data(payload):
357
+ success_count += 1
358
+ else:
359
+ with self.lock:
360
+ self.failed_queue.append(payload)
361
+
362
+ return success_count
363
+
364
+ def configure(self, webhook_url):
365
+ """Configure webhook URL"""
366
+ with self.lock:
367
+ self.webhook_url = webhook_url
368
+ self._save_config()
369
+ return True
370
+
371
+ # Create singleton instance
372
+ conduit = Conduit()
373
+
374
+ # Exposed functions that match sync.py naming pattern
375
+ def track_function_call(function_name, source, execution_time, success=True, error=None, args=None):
376
+ """Track function call (bridge to add_function_call)"""
377
+ record = {
378
+ "function": function_name,
379
+ "source": source,
380
+ "execution_time": execution_time,
381
+ "timestamp": datetime.now().isoformat(),
382
+ "success": success
383
+ }
384
+
385
+ if error:
386
+ record["error"] = error
387
+
388
+ if args:
389
+ # Sanitize arguments
390
+ sanitized_args = {}
391
+ if isinstance(args, dict):
392
+ for key, value in args.items():
393
+ if isinstance(value, (str, int, float, bool)):
394
+ sanitized_args[key] = value
395
+ else:
396
+ sanitized_args[key] = str(type(value))
397
+ else:
398
+ sanitized_args = {"value": str(args)}
399
+ record["args"] = sanitized_args
400
+
401
+ conduit.add_function_call(record)
402
+
403
+ def track_rate_limit(source, limit_type, limit_value, current_usage, is_exceeded):
404
+ """Track rate limit checks (bridge to add_rate_limit)"""
405
+ record = {
406
+ "source": source,
407
+ "limit_type": limit_type,
408
+ "limit_value": limit_value,
409
+ "current_usage": current_usage,
410
+ "is_exceeded": is_exceeded,
411
+ "timestamp": datetime.now().isoformat(),
412
+ "usage_percentage": (current_usage / limit_value) * 100 if limit_value > 0 else 0
413
+ }
414
+
415
+ conduit.add_rate_limit(record)
416
+
417
+ def track_api_request(endpoint, source, method, status_code, execution_time, request_size=0, response_size=0):
418
+ """Track API requests (bridge to add_api_request)"""
419
+ record = {
420
+ "endpoint": endpoint,
421
+ "source": source,
422
+ "method": method,
423
+ "status_code": status_code,
424
+ "execution_time": execution_time,
425
+ "timestamp": datetime.now().isoformat(),
426
+ "request_size": request_size,
427
+ "response_size": response_size
428
+ }
429
+
430
+ conduit.add_api_request(record)
431
+
432
+ def configure(webhook_url):
433
+ """Configure webhook URL"""
434
+ return conduit.configure(webhook_url)
435
+
436
+ def sync_now():
437
+ """Manually trigger synchronization"""
438
+ return conduit.dispatch("manual")
439
+
440
+ def retry_failed():
441
+ """Retry failed synchronizations"""
442
+ return conduit.retry_failed()
vnai/scope/__init__.py CHANGED
@@ -1,4 +1,7 @@
1
+ # vnai/scope/__init__.py
2
+ # Environment detection and state tracking
3
+
1
4
  from vnai.scope.profile import inspector
2
- from vnai.scope.state import tracker,record
5
+ from vnai.scope.state import tracker, record
3
6
  from vnai.scope.promo import manager as content_manager
4
- from vnai.scope.promo import present as present_content
7
+ from vnai.scope.promo import present as present_content