cnhkmcp 1.3.6__py3-none-any.whl → 1.3.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cnhkmcp/__init__.py +1 -1
- cnhkmcp/untracked/APP/.gitignore +32 -0
- cnhkmcp/untracked/APP/MODULAR_STRUCTURE.md +123 -0
- cnhkmcp/untracked/APP/README.md +309 -0
- cnhkmcp/untracked/APP/__pycache__/app.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__init__.py +5 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/__init__.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/feature_engineering.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/idea_house.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/inspiration_house.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/paper_analysis.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/simulator.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/unified_tools.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/__pycache__/wqb_simulator.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/blueprints/feature_engineering.py +347 -0
- cnhkmcp/untracked/APP/blueprints/idea_house.py +221 -0
- cnhkmcp/untracked/APP/blueprints/inspiration_house.py +432 -0
- cnhkmcp/untracked/APP/blueprints/paper_analysis.py +570 -0
- cnhkmcp/untracked/APP/custom_templates/templates.json +4582 -0
- cnhkmcp/untracked/APP/hkSimulator/ace_lib.py +1476 -0
- cnhkmcp/untracked/APP/hkSimulator/autosimulator.py +346 -0
- cnhkmcp/untracked/APP/hkSimulator/helpful_functions.py +180 -0
- cnhkmcp/untracked/APP/mirror_config.txt +20 -0
- cnhkmcp/untracked/APP/operaters.csv +129 -0
- cnhkmcp/untracked/APP/requirements.txt +44 -0
- cnhkmcp/untracked/APP/run_app.bat +28 -0
- cnhkmcp/untracked/APP/run_app.sh +34 -0
- cnhkmcp/untracked/APP/setup_tsinghua.bat +39 -0
- cnhkmcp/untracked/APP/setup_tsinghua.sh +43 -0
- cnhkmcp/untracked/APP/simulator/__pycache__/simulator_wqb.cpython-313.pyc +0 -0
- cnhkmcp/untracked/APP/simulator/alpha_submitter.py +366 -0
- cnhkmcp/untracked/APP/simulator/simulator_wqb.py +602 -0
- cnhkmcp/untracked/APP/ssrn-3332513.pdf +109188 -19
- cnhkmcp/untracked/APP/static/brain.js +478 -0
- cnhkmcp/untracked/APP/static/decoder.js +1275 -0
- cnhkmcp/untracked/APP/static/feature_engineering.js +1729 -0
- cnhkmcp/untracked/APP/static/idea_house.js +937 -0
- cnhkmcp/untracked/APP/static/inspiration_house.js +868 -0
- cnhkmcp/untracked/APP/static/paper_analysis.js +390 -0
- cnhkmcp/untracked/APP/static/script.js +2577 -0
- cnhkmcp/untracked/APP/static/simulator.js +597 -0
- cnhkmcp/untracked/APP/static/styles.css +3099 -0
- cnhkmcp/untracked/APP/templates/feature_engineering.html +959 -0
- cnhkmcp/untracked/APP/templates/idea_house.html +563 -0
- cnhkmcp/untracked/APP/templates/index.html +769 -0
- cnhkmcp/untracked/APP/templates/inspiration_house.html +860 -0
- cnhkmcp/untracked/APP/templates/paper_analysis.html +90 -0
- cnhkmcp/untracked/APP/templates/simulator.html +342 -0
- cnhkmcp/untracked/APP//321/210/342/224/220/320/240/321/210/320/261/320/234/321/206/320/231/320/243/321/205/342/225/235/320/220/321/206/320/230/320/241.py +1489 -0
- {cnhkmcp-1.3.6.dist-info → cnhkmcp-1.3.7.dist-info}/METADATA +1 -1
- cnhkmcp-1.3.7.dist-info/RECORD +67 -0
- cnhkmcp/untracked/APP.zip +0 -0
- cnhkmcp-1.3.6.dist-info/RECORD +0 -20
- {cnhkmcp-1.3.6.dist-info → cnhkmcp-1.3.7.dist-info}/WHEEL +0 -0
- {cnhkmcp-1.3.6.dist-info → cnhkmcp-1.3.7.dist-info}/entry_points.txt +0 -0
- {cnhkmcp-1.3.6.dist-info → cnhkmcp-1.3.7.dist-info}/licenses/LICENSE +0 -0
- {cnhkmcp-1.3.6.dist-info → cnhkmcp-1.3.7.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1489 @@
|
|
|
1
|
+
"""
|
|
2
|
+
BRAIN Expression Template Decoder - Flask Web Application
|
|
3
|
+
A complete web application for decoding string templates with WorldQuant BRAIN integration
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
# Auto-install dependencies if missing
|
|
7
|
+
import subprocess
|
|
8
|
+
import sys
|
|
9
|
+
import os
|
|
10
|
+
|
|
11
|
+
def install_requirements():
    """Install required packages from requirements.txt if they're missing.

    Checks a fixed set of import names; when any are missing, runs pip
    against requirements.txt using a configurable mirror (default Tsinghua),
    falling back to default PyPI on failure.

    Returns:
        bool: True if everything is (or becomes) installed, False otherwise.
    """
    print("🔍 Checking and installing required dependencies...")
    print("📋 Verifying packages needed for BRAIN Expression Template Decoder...")

    # Resolve files relative to this script, not the current working directory.
    app_dir = os.path.dirname(os.path.abspath(__file__))

    req_file = os.path.join(app_dir, 'requirements.txt')
    if not os.path.exists(req_file):
        print("❌ Error: requirements.txt not found!")
        print(f"Looking for: {req_file}")
        return False

    # Pick the pip index URL: first non-comment http(s) line of
    # mirror_config.txt wins; otherwise use the Tsinghua mirror.
    mirror_url = 'https://pypi.tuna.tsinghua.edu.cn/simple'  # Default to Tsinghua
    config_path = os.path.join(app_dir, 'mirror_config.txt')
    if os.path.exists(config_path):
        try:
            with open(config_path, 'r', encoding='utf-8') as cfg:
                for raw_line in cfg:
                    candidate = raw_line.strip()
                    if candidate and not candidate.startswith('#') and candidate.startswith('http'):
                        mirror_url = candidate
                        break
        except Exception as e:
            print(f"Warning: Could not read mirror configuration: {e}")

    # Mapping of import name -> pip distribution name for everything the
    # application needs at runtime.
    packages_to_check = {
        'flask': 'flask',
        'flask_cors': 'flask-cors',
        'requests': 'requests',
        'pandas': 'pandas',
        'PyPDF2': 'PyPDF2',
        'docx': 'python-docx',
        'pdfplumber': 'pdfplumber',
        'fitz': 'PyMuPDF',
        'cozepy': 'cozepy',
        'lxml': 'lxml',
        'bs4': 'beautifulsoup4',
    }

    missing_packages = []
    for import_name, pip_name in packages_to_check.items():
        try:
            __import__(import_name)
        except ImportError:
            missing_packages.append(pip_name)
            print(f"Missing package: {pip_name} (import name: {import_name})")

    if not missing_packages:
        print("✅ All required dependencies are already installed!")
        return True

    print(f"⚠️ Missing packages detected: {', '.join(missing_packages)}")
    print("📦 Installing dependencies from requirements.txt...")
    print(f"🌐 Using mirror: {mirror_url}")

    try:
        # Install all requirements using the configured mirror.
        subprocess.check_call([
            sys.executable, '-m', 'pip', 'install',
            '-i', mirror_url,
            '-r', req_file
        ])
        print("✅ All dependencies installed successfully!")
        return True
    except subprocess.CalledProcessError:
        print(f"❌ Error: Failed to install dependencies using {mirror_url}")
        print("🔄 Trying with default PyPI...")
        try:
            # Fallback: let pip use its default index.
            subprocess.check_call([sys.executable, '-m', 'pip', 'install', '-r', req_file])
            print("✅ All dependencies installed successfully!")
            return True
        except subprocess.CalledProcessError:
            print("❌ Error: Failed to install dependencies. Please run manually:")
            print(f" {sys.executable} -m pip install -i {mirror_url} -r requirements.txt")
            return False
|
|
93
|
+
|
|
94
|
+
# Check and install dependencies before importing
|
|
95
|
+
# This will run every time the module is imported, but only install if needed
|
|
96
|
+
def check_and_install_dependencies():
    """Check and install dependencies if needed.

    Runs the (potentially slow) install check at most once per process by
    caching success in the module-global ``_dependencies_checked`` flag.

    Returns:
        bool: True when dependencies are available, False when installation
        failed and the user must intervene.
    """
    # Fast path: a previous successful check short-circuits everything.
    if globals().get('_dependencies_checked'):
        return True

    if not install_requirements():
        print("\nPlease install the dependencies manually and try again.")
        return False

    # Remember success so repeated imports skip the probe.
    globals()['_dependencies_checked'] = True
    return True
|
|
106
|
+
|
|
107
|
+
# Always run the dependency check when this module is imported.
print("🚀 Initializing BRAIN Expression Template Decoder...")
if not check_and_install_dependencies():
    # Fatal when executed as a script; when imported as a module we warn
    # and continue so the importer can decide what to do.
    if __name__ == "__main__":
        sys.exit(1)
    else:
        print("⚠️ Warning: Some dependencies may be missing. Please run 'pip install -r requirements.txt'")
        print("🔄 Continuing with import, but some features may not work properly.")

# Now import the packages (deferred until after the install check above).
try:
    from flask import Flask, render_template, request, jsonify, session as flask_session
    from flask_cors import CORS
    import requests
    import json
    import time
    import os
    from datetime import datetime
    print("📚 Core packages imported successfully!")
except ImportError as e:
    print(f"❌ Failed to import core packages: {e}")
    print("Please run: pip install -r requirements.txt")
    if __name__ == "__main__":
        sys.exit(1)
    # When imported, re-raise so the caller sees the real ImportError.
    raise

# Flask application setup.
# NOTE(review): the secret key is hard-coded (its own text says to change it
# in production) — move it to an environment variable before deploying.
app = Flask(__name__)
app.secret_key = 'brain_template_decoder_secret_key_change_in_production'
CORS(app)

print("🌐 Flask application initialized with CORS support!")

# BRAIN API configuration
BRAIN_API_BASE = 'https://api.worldquantbrain.com'

# Store BRAIN sessions (in production, use proper session management like Redis).
# Maps session_id -> dict with at least 'session' (requests.Session),
# 'username', 'timestamp', plus biometric-pending fields when applicable.
brain_sessions = {}

print("🧠 BRAIN API integration configured!")
|
|
146
|
+
|
|
147
|
+
def sign_in_to_brain(username, password):
    """Sign in to BRAIN API with retry logic and biometric authentication support.

    Args:
        username: BRAIN account username.
        password: BRAIN account password.

    Returns:
        requests.Session: an authenticated session on plain success, OR
        dict: {'requires_biometric': True, 'biometric_url', 'session',
        'location'} when the API demands persona/biometric verification.

    Raises:
        requests.HTTPError: for invalid credentials (no retry) or after
            max_retries transient HTTP failures.
        Exception: other errors after max_retries attempts.
    """
    from urllib.parse import urljoin

    # Create a session to persistently store the headers
    session = requests.Session()
    # Save credentials into the session (used as HTTP basic auth)
    session.auth = (username, password)

    retry_count = 0
    max_retries = 3

    while retry_count < max_retries:
        try:
            # Send a POST request to the /authentication API
            response = session.post(f'{BRAIN_API_BASE}/authentication')

            # Check if biometric authentication is needed
            if response.status_code == requests.codes.unauthorized:
                # "persona" in WWW-Authenticate means the account requires a
                # browser-based biometric step, not that the password is wrong.
                if response.headers.get("WWW-Authenticate") == "persona":
                    # Get biometric auth URL
                    location = response.headers.get("Location")
                    if location:
                        biometric_url = urljoin(response.url, location)

                        # Return special response indicating biometric auth is
                        # needed; caller completes it via /api/complete-biometric.
                        return {
                            'requires_biometric': True,
                            'biometric_url': biometric_url,
                            'session': session,
                            'location': location
                        }
                    else:
                        raise Exception("Biometric authentication required but no Location header provided")
                else:
                    # Regular authentication failure
                    print("Incorrect username or password")
                    # NOTE(review): raised without an attached response object;
                    # downstream handlers match on the message text below.
                    raise requests.HTTPError("Authentication failed: Invalid username or password")

            # If we get here, authentication was successful
            response.raise_for_status()
            print("Authentication successful.")
            return session

        except requests.HTTPError as e:
            # Message-text match keeps invalid-credential errors from being
            # retried; transient HTTP errors fall through to the retry loop.
            if "Invalid username or password" in str(e) or "Authentication failed" in str(e):
                raise  # Don't retry for invalid credentials
            print(f"HTTP error occurred: {e}")
            retry_count += 1
            if retry_count < max_retries:
                print(f"Retrying... Attempt {retry_count + 1} of {max_retries}")
                time.sleep(10)
            else:
                print("Max retries reached. Authentication failed.")
                raise
        except Exception as e:
            # Any other failure (network error, missing Location header, ...)
            # is retried up to max_retries times with a 10 s pause.
            print(f"Error during authentication: {e}")
            retry_count += 1
            if retry_count < max_retries:
                print(f"Retrying... Attempt {retry_count + 1} of {max_retries}")
                time.sleep(10)
            else:
                print("Max retries reached. Authentication failed.")
                raise
|
|
211
|
+
|
|
212
|
+
# Routes
|
|
213
|
+
@app.route('/')
def index():
    """Main application page.

    Returns:
        Rendered ``templates/index.html``.
    """
    return render_template('index.html')
|
|
217
|
+
|
|
218
|
+
@app.route('/simulator')
def simulator():
    """User-friendly simulator interface.

    Returns:
        Rendered ``templates/simulator.html``.
    """
    return render_template('simulator.html')
|
|
222
|
+
|
|
223
|
+
@app.route('/api/simulator/logs', methods=['GET'])
def get_simulator_logs():
    """Get available log files in the simulator directory.

    Scans this script's directory and its ``simulator/`` subdirectory for
    ``wqb*.log`` files and returns their metadata, newest first.

    Returns:
        JSON: {'logs': [...], 'latest': <newest filename or None>, 'count': int}
        or a 500 JSON error payload.
    """
    try:
        import glob
        import os
        from datetime import datetime

        base_dir = os.path.dirname(os.path.abspath(__file__))
        # Candidate locations: alongside this script and under simulator/.
        search_dirs = [base_dir, os.path.join(base_dir, 'simulator')]

        entries = []
        for folder in search_dirs:
            if not os.path.exists(folder):
                continue
            for log_file in glob.glob(os.path.join(folder, 'wqb*.log')):
                try:
                    info = os.stat(log_file)
                    entries.append({
                        'filename': os.path.basename(log_file),
                        'path': log_file,
                        'size': f"{info.st_size / 1024:.1f} KB",
                        'modified': datetime.fromtimestamp(info.st_mtime).strftime('%Y-%m-%d %H:%M:%S'),
                        'mtime': info.st_mtime,
                    })
                except Exception as e:
                    # An unreadable file is reported but doesn't abort the scan.
                    print(f"Error reading log file {log_file}: {e}")

        # Newest first, keyed on raw modification time.
        entries.sort(key=lambda entry: entry['mtime'], reverse=True)

        newest = entries[0]['filename'] if entries else None

        return jsonify({
            'logs': entries,
            'latest': newest,
            'count': len(entries),
        })

    except Exception as e:
        return jsonify({'error': f'Error getting log files: {str(e)}'}), 500
|
|
268
|
+
|
|
269
|
+
@app.route('/api/simulator/logs/<filename>', methods=['GET'])
def get_simulator_log_content(filename):
    """Get content of a specific log file.

    Args:
        filename: Name of a ``wqb*.log`` file located next to this script or
            in its ``simulator/`` subdirectory.

    Returns:
        JSON with 'content', 'filename' and 'size' on success; 400 for an
        unsafe name, 404 when not found, 500 on unexpected errors.
    """
    try:
        import os

        # Security: only allow log files with safe names. Besides requiring
        # the wqb*.log pattern, reject separators and parent references —
        # Flask's default <filename> converter blocks '/', but a backslash or
        # '..' inside the URL segment would otherwise let os.path.join escape
        # the log directories on Windows (path traversal).
        if (not filename.startswith('wqb')
                or not filename.endswith('.log')
                or os.path.basename(filename) != filename
                or '..' in filename
                or '/' in filename
                or '\\' in filename):
            return jsonify({'error': 'Invalid log file name'}), 400

        script_dir = os.path.dirname(os.path.abspath(__file__))
        simulator_dir = os.path.join(script_dir, 'simulator')

        # Look for the file in both directories
        log_path = None
        for directory in [script_dir, simulator_dir]:
            potential_path = os.path.join(directory, filename)
            if os.path.exists(potential_path):
                log_path = potential_path
                break

        if not log_path:
            return jsonify({'error': 'Log file not found'}), 404

        # Read file content with multiple encoding attempts; logs may have
        # been written under a local (e.g. Chinese) codepage. Note latin-1
        # never raises UnicodeDecodeError, so it acts as a near-guaranteed
        # fallback before the binary path below.
        content = None
        encodings_to_try = ['utf-8', 'gbk', 'gb2312', 'big5', 'latin-1', 'cp1252']

        for encoding in encodings_to_try:
            try:
                with open(log_path, 'r', encoding=encoding) as f:
                    content = f.read()
                    print(f"Successfully read log file with {encoding} encoding")
                    break
            except UnicodeDecodeError:
                continue
            except Exception as e:
                print(f"Error reading with {encoding}: {e}")
                continue

        if content is None:
            # Last resort: read as binary and decode with error handling
            try:
                with open(log_path, 'rb') as f:
                    raw_content = f.read()
                content = raw_content.decode('utf-8', errors='replace')
                print("Used UTF-8 with error replacement for log content")
            except Exception as e:
                content = f"Error: Could not decode file content - {str(e)}"

        response = jsonify({
            'content': content,
            'filename': filename,
            'size': len(content)
        })
        # Force UTF-8 so non-ASCII log text renders correctly in browsers.
        response.headers['Content-Type'] = 'application/json; charset=utf-8'
        return response

    except Exception as e:
        return jsonify({'error': f'Error reading log file: {str(e)}'}), 500
|
|
329
|
+
|
|
330
|
+
@app.route('/api/simulator/test-connection', methods=['POST'])
def test_simulator_connection():
    """Test BRAIN API connection for simulator.

    Accepts JSON {'username', 'password'}, authenticates via
    sign_in_to_brain, and probes one read-only endpoint to confirm the
    session works. Biometric-gated accounts are reported, not handled here.
    """
    try:
        payload = request.get_json()
        username = payload.get('username')
        password = payload.get('password')

        if not username or not password:
            return jsonify({'error': 'Username and password required'}), 400

        # Reuse the shared sign-in helper for the connectivity check.
        auth_result = sign_in_to_brain(username, password)

        # A dict result means BRAIN wants browser-based biometric auth,
        # which this endpoint cannot complete.
        if isinstance(auth_result, dict) and auth_result.get('requires_biometric'):
            return jsonify({
                'success': False,
                'error': 'Biometric authentication required. Please use the main interface first to complete authentication.',
                'requires_biometric': True
            })

        # Probe a simple read-only endpoint to verify the session is usable.
        probe = auth_result.get(f'{BRAIN_API_BASE}/data-fields/open')

        if probe.ok:
            return jsonify({
                'success': True,
                'message': 'Connection successful'
            })

        return jsonify({
            'success': False,
            'error': f'API test failed: {probe.status_code}'
        })

    except Exception as e:
        return jsonify({
            'success': False,
            'error': f'Connection failed: {str(e)}'
        })
|
|
372
|
+
|
|
373
|
+
@app.route('/api/simulator/run', methods=['POST'])
def run_simulator_with_params():
    """Run simulator with user-provided parameters in a new terminal.

    Accepts a multipart form with a JSON expressions file plus credentials
    and tuning options, writes three temp files (expressions JSON, a
    generated driver script, a Windows .bat launcher) into the simulator/
    directory, and launches the driver in a fresh console window.

    Returns:
        JSON: success flag + echo of the launch parameters, or a 400/500
        error payload.
    """
    try:
        import subprocess
        import threading
        import json
        import os
        import tempfile  # NOTE(review): imported but never used in this handler
        import sys
        import time

        # Get form data
        json_file = request.files.get('jsonFile')
        username = request.form.get('username')
        password = request.form.get('password')
        start_position = int(request.form.get('startPosition', 0))
        concurrent_count = int(request.form.get('concurrentCount', 3))
        random_shuffle = request.form.get('randomShuffle') == 'true'
        use_multi_sim = request.form.get('useMultiSim') == 'true'
        alpha_count_per_slot = int(request.form.get('alphaCountPerSlot', 3))

        if not json_file or not username or not password:
            return jsonify({'error': 'Missing required parameters'}), 400

        # Validate and read JSON file (must be a JSON array of expressions)
        try:
            json_content = json_file.read().decode('utf-8')
            expressions_data = json.loads(json_content)
            if not isinstance(expressions_data, list):
                return jsonify({'error': 'JSON file must contain an array of expressions'}), 400
        except Exception as e:
            return jsonify({'error': f'Invalid JSON file: {str(e)}'}), 400

        # Get paths relative to this script
        script_dir = os.path.dirname(os.path.abspath(__file__))
        simulator_dir = os.path.join(script_dir, 'simulator')

        # Create temporary files for the automated run, timestamped to avoid
        # collisions between concurrent launches.
        temp_json_path = os.path.join(simulator_dir, f'temp_expressions_{int(time.time())}.json')
        temp_script_path = os.path.join(simulator_dir, f'temp_automated_{int(time.time())}.py')
        temp_batch_path = os.path.join(simulator_dir, f'temp_run_{int(time.time())}.bat')

        try:
            # Save the JSON data to temporary file
            with open(temp_json_path, 'w', encoding='utf-8') as f:
                json.dump(expressions_data, f, ensure_ascii=False, indent=2)

            # Create the automated script that calls automated_main.
            # NOTE(review): username/password are interpolated verbatim into
            # the generated source AND written to disk in plaintext — a quote
            # or brace in a password will break (or alter) the script;
            # consider json.dumps-style escaping and avoid persisting
            # credentials. Doubled braces {{...}} below are literal braces in
            # the generated script.
            script_content = f'''
import asyncio
import sys
import os
import json

# Add current directory to path
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

import simulator_wqb

async def run_automated():
    """Run the automated simulator with parameters from web interface"""
    try:
        # Load JSON data
        with open(r"{temp_json_path}", 'r', encoding='utf-8') as f:
            json_content = f.read()

        # Call automated_main with parameters
        result = await simulator_wqb.automated_main(
            json_file_content=json_content,
            username="{username}",
            password="{password}",
            start_position={start_position},
            concurrent_count={concurrent_count},
            random_shuffle={random_shuffle},
            use_multi_sim={use_multi_sim},
            alpha_count_per_slot={alpha_count_per_slot}
        )

        if result['success']:
            print("\\n" + "="*60)
            print("🎉 WEB INTERFACE AUTOMATION SUCCESS 🎉")
            print("="*60)
            print(f"✅ Total simulations: {{result['results']['total']}}")
            print(f"✅ Successful: {{result['results']['successful']}}")
            print(f"❌ Failed: {{result['results']['failed']}}")
            if result['results']['alphaIds']:
                print(f"📊 Generated {{len(result['results']['alphaIds'])}} Alpha IDs")
            print("="*60)
        else:
            print("\\n" + "="*60)
            print("❌ WEB INTERFACE AUTOMATION FAILED")
            print("="*60)
            print(f"Error: {{result['error']}}")
            print("="*60)

    except Exception as e:
        print(f"\\n❌ Script execution error: {{e}}")

    finally:
        # Clean up temporary files
        try:
            if os.path.exists(r"{temp_json_path}"):
                os.remove(r"{temp_json_path}")
            if os.path.exists(r"{temp_script_path}"):
                os.remove(r"{temp_script_path}")
            if os.path.exists(r"{temp_batch_path}"):
                os.remove(r"{temp_batch_path}")
        except:
            pass

    print("\\n🔄 Press any key to close this window...")
    input()

if __name__ == '__main__':
    asyncio.run(run_automated())
'''

            # Save the script
            with open(temp_script_path, 'w', encoding='utf-8') as f:
                f.write(script_content)

            # Create batch file for Windows
            batch_content = f'''@echo off
cd /d "{simulator_dir}"
python "{os.path.basename(temp_script_path)}"
'''
            with open(temp_batch_path, 'w', encoding='utf-8') as f:
                f.write(batch_content)

            # Launch in new terminal
            def launch_simulator():
                # Fire-and-forget launcher, run on a daemon thread so the
                # HTTP response returns immediately.
                try:
                    if os.name == 'nt':  # Windows
                        # Use batch file to avoid path issues
                        subprocess.Popen([
                            temp_batch_path
                        ], creationflags=subprocess.CREATE_NEW_CONSOLE)
                    else:  # Unix-like systems
                        # Try different terminal emulators
                        terminals = ['gnome-terminal', 'xterm', 'konsole', 'terminal']
                        for terminal in terminals:
                            try:
                                if terminal == 'gnome-terminal':
                                    subprocess.Popen([
                                        terminal, '--working-directory', simulator_dir,
                                        '--', 'python3', os.path.basename(temp_script_path)
                                    ])
                                else:
                                    subprocess.Popen([
                                        terminal, '-e',
                                        f'cd "{simulator_dir}" && python3 "{os.path.basename(temp_script_path)}"'
                                    ])
                                break
                            except FileNotFoundError:
                                continue
                        else:
                            # Fallback: run in background if no terminal found
                            subprocess.Popen([
                                sys.executable, temp_script_path
                            ], cwd=simulator_dir)
                except Exception as e:
                    print(f"Error launching simulator: {e}")

            # Start the simulator in a separate thread
            thread = threading.Thread(target=launch_simulator)
            thread.daemon = True
            thread.start()

            return jsonify({
                'success': True,
                'message': 'Simulator launched in new terminal window',
                'parameters': {
                    'expressions_count': len(expressions_data),
                    'concurrent_count': concurrent_count,
                    'use_multi_sim': use_multi_sim,
                    'alpha_count_per_slot': alpha_count_per_slot if use_multi_sim else None
                }
            })

        except Exception as e:
            # Clean up on error (temp files are otherwise removed by the
            # generated script's own finally block)
            try:
                if os.path.exists(temp_json_path):
                    os.remove(temp_json_path)
                if os.path.exists(temp_script_path):
                    os.remove(temp_script_path)
                if os.path.exists(temp_batch_path):
                    os.remove(temp_batch_path)
            except:
                pass
            raise e

    except Exception as e:
        return jsonify({'error': f'Failed to run simulator: {str(e)}'}), 500
|
|
568
|
+
|
|
569
|
+
@app.route('/api/simulator/stop', methods=['POST'])
def stop_simulator():
    """Stop running simulator.

    Placeholder endpoint: real process management (tracking and terminating
    launched simulator processes) would be required in production.
    """
    try:
        return jsonify({
            'success': True,
            'message': 'Stop signal sent'
        })
    except Exception as e:
        return jsonify({'error': f'Failed to stop simulator: {str(e)}'}), 500
|
|
581
|
+
|
|
582
|
+
@app.route('/api/authenticate', methods=['POST'])
def authenticate():
    """Authenticate with BRAIN API.

    Expects JSON {'username', 'password'}. On success stores the
    authenticated requests.Session in ``brain_sessions`` under a generated
    session id (also mirrored into the Flask session) and returns it. When
    BRAIN requires biometric auth, returns the URL to visit plus a
    '..._biometric_pending' session id for /api/complete-biometric.

    Returns:
        JSON success payload, a biometric-required payload, 400 for missing
        fields, 401 for bad credentials, or 500 on other failures.
    """
    try:
        data = request.get_json()
        username = data.get('username')
        password = data.get('password')

        if not username or not password:
            return jsonify({'error': 'Username and password required'}), 400

        # Authenticate with BRAIN
        result = sign_in_to_brain(username, password)

        # Check if biometric authentication is required
        if isinstance(result, dict) and result.get('requires_biometric'):
            # Store the session temporarily with biometric pending status
            session_id = f"{username}_{int(time.time())}_biometric_pending"
            brain_sessions[session_id] = {
                'session': result['session'],
                'username': username,
                'timestamp': time.time(),
                'biometric_pending': True,
                'biometric_location': result['location']
            }

            # Store session ID in Flask session
            flask_session['brain_session_id'] = session_id

            return jsonify({
                'success': False,
                'requires_biometric': True,
                'biometric_url': result['biometric_url'],
                'session_id': session_id,
                'message': 'Please complete biometric authentication by visiting the provided URL'
            })

        # Regular successful authentication
        brain_session = result

        # Store session
        session_id = f"{username}_{int(time.time())}"
        brain_sessions[session_id] = {
            'session': brain_session,
            'username': username,
            'timestamp': time.time()
        }

        # Store session ID in Flask session
        flask_session['brain_session_id'] = session_id

        return jsonify({
            'success': True,
            'session_id': session_id,
            'message': 'Authentication successful'
        })

    except requests.HTTPError as e:
        # BUGFIX: sign_in_to_brain raises HTTPError("Authentication failed:
        # Invalid username or password") manually, with no attached response,
        # so e.response is None and the previous unconditional
        # `e.response.status_code` crashed with AttributeError (surfacing as
        # an unhandled 500). Treat a real 401 response OR the manual
        # invalid-credentials message as a clean 401.
        if (e.response is not None and e.response.status_code == 401) \
                or 'Invalid username or password' in str(e):
            return jsonify({'error': 'Invalid username or password'}), 401
        else:
            return jsonify({'error': f'Authentication failed: {str(e)}'}), 500
    except Exception as e:
        return jsonify({'error': f'Authentication error: {str(e)}'}), 500
|
|
646
|
+
|
|
647
|
+
@app.route('/api/complete-biometric', methods=['POST'])
def complete_biometric():
    """Complete biometric authentication after user has done it in browser"""
    try:
        from urllib.parse import urljoin

        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        session_info = brain_sessions[session_id]

        # Only sessions created by a biometric-required login may call this.
        if not session_info.get('biometric_pending'):
            return jsonify({'error': 'Session is not pending biometric authentication'}), 400

        brain_session = session_info['session']
        location = session_info['biometric_location']

        try:
            # Resolve the (possibly relative) location against the auth endpoint.
            auth_url = urljoin(f'{BRAIN_API_BASE}/authentication', location)

            # Poll until the platform reports the biometric step finished.
            max_attempts = 5
            attempt = 0
            while True:
                bio_response = brain_session.post(auth_url)
                if bio_response.status_code == 201:
                    # Biometric authentication confirmed by the server.
                    break
                if bio_response.status_code == 401:
                    # Not confirmed yet; retry a limited number of times.
                    attempt += 1
                    if attempt >= max_attempts:
                        return jsonify({
                            'success': False,
                            'error': 'Biometric authentication not completed. Please try again.'
                        })
                    time.sleep(2)  # Wait a bit before retrying
                    continue
                # Any other status is an unexpected failure.
                bio_response.raise_for_status()

            # Promote the session out of the pending state.
            session_info['biometric_pending'] = False
            del session_info['biometric_location']

            # Re-key the session without the biometric_pending suffix.
            new_session_id = f"{session_info['username']}_{int(time.time())}"
            brain_sessions[new_session_id] = {
                'session': brain_session,
                'username': session_info['username'],
                'timestamp': time.time()
            }
            del brain_sessions[session_id]

            # Point the Flask cookie session at the new key.
            flask_session['brain_session_id'] = new_session_id

            return jsonify({
                'success': True,
                'session_id': new_session_id,
                'message': 'Biometric authentication completed successfully'
            })

        except requests.HTTPError as e:
            return jsonify({
                'success': False,
                'error': f'Failed to complete biometric authentication: {str(e)}'
            })

    except Exception as e:
        return jsonify({
            'success': False,
            'error': f'Error completing biometric authentication: {str(e)}'
        })
|
+
def _fetch_operators_primary(brain_session):
    """Fetch operators with one unparameterized request; follow server-side
    pagination (dict with 'results'/'count') if the API uses it.

    Returns a list of raw operator records (empty on an unknown payload shape).
    """
    response = brain_session.get(f'{BRAIN_API_BASE}/operators')
    response.raise_for_status()
    data = response.json()

    # A bare list means the API returned everything in one shot.
    if isinstance(data, list):
        print(f"Fetched {len(data)} operators from BRAIN API (direct)")
        return data

    # A dict with 'results' means the response is paginated.
    if isinstance(data, dict) and 'results' in data:
        all_operators = list(data['results'])
        total_count = data.get('count', len(data['results']))
        print(f"Found {total_count} total operators, fetching all...")

        limit = 100
        offset = len(all_operators)
        while len(all_operators) < total_count:
            params = {'limit': limit, 'offset': offset}
            batch_response = brain_session.get(f'{BRAIN_API_BASE}/operators', params=params)
            batch_response.raise_for_status()
            batch_data = batch_response.json()
            if not (isinstance(batch_data, dict) and 'results' in batch_data):
                break
            batch_operators = batch_data['results']
            if not batch_operators:  # No more data
                break
            all_operators.extend(batch_operators)
            offset += len(batch_operators)

        print(f"Fetched {len(all_operators)} operators from BRAIN API (paginated)")
        return all_operators

    print("Unknown response format for operators API")
    return []


def _fetch_operators_fallback(brain_session):
    """Fallback fetch: page through /operators with explicit limit/offset
    until a short (or unrecognized) page signals exhaustion."""
    all_operators = []
    limit = 100
    offset = 0

    while True:
        params = {'limit': limit, 'offset': offset}
        response = brain_session.get(f'{BRAIN_API_BASE}/operators', params=params)
        response.raise_for_status()

        data = response.json()
        if isinstance(data, list):
            all_operators.extend(data)
            if len(data) < limit:
                break
        elif isinstance(data, dict) and 'results' in data:
            batch_operators = data['results']
            all_operators.extend(batch_operators)
            if len(batch_operators) < limit:
                break
        else:
            break

        offset += limit

    print(f"Fetched {len(all_operators)} operators from BRAIN API (fallback)")
    return all_operators


def _summarize_operator(op):
    """Reduce one raw operator record to the fields the frontend needs.

    'name' and 'category' are required; description/definition/usageCount/
    example are passed through only when present (and truthy, for text fields).
    """
    operator_data = {
        'name': op['name'],
        'category': op['category']
    }
    if 'description' in op and op['description']:
        operator_data['description'] = op['description']
    if 'definition' in op and op['definition']:
        operator_data['definition'] = op['definition']
    if 'usageCount' in op:
        operator_data['usageCount'] = op['usageCount']
    if 'example' in op and op['example']:
        operator_data['example'] = op['example']
    return operator_data


@app.route('/api/operators', methods=['GET'])
def get_operators():
    """Get user operators from BRAIN API.

    Tries a single request first (most APIs return all operators at once),
    falls back to explicit limit/offset paging on any failure, then returns
    a trimmed JSON list of operator summaries.
    """
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        session_info = brain_sessions[session_id]
        brain_session = session_info['session']

        try:
            all_operators = _fetch_operators_primary(brain_session)
        except Exception as e:
            print(f"Error fetching operators: {str(e)}")
            # Fallback: try with explicit pagination
            all_operators = _fetch_operators_fallback(brain_session)

        filtered_operators = [_summarize_operator(op) for op in all_operators]

        return jsonify(filtered_operators)

    except Exception as e:
        print(f"Error fetching operators: {str(e)}")
        return jsonify({'error': f'Failed to fetch operators: {str(e)}'}), 500
|
+
@app.route('/api/datafields', methods=['GET'])
def get_datafields():
    """Get data fields from BRAIN API.

    Pages through /data-fields in batches of 50 for the requested
    region/delay/universe/dataset and returns a trimmed JSON list.
    """
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        session_info = brain_sessions[session_id]
        brain_session = session_info['session']

        # Query parameters with defaults matching the platform's common setup.
        region = request.args.get('region', 'USA')
        delay = request.args.get('delay', '1')
        universe = request.args.get('universe', 'TOP3000')
        dataset_id = request.args.get('dataset_id', 'fundamental6')
        search = ''

        # Build URL template based on notebook implementation.
        # BUGFIX: the query string previously contained the mojibake "®ion="
        # (a corrupted "&region="), so the region parameter was never sent;
        # restored the intended "&region=" separator.
        if len(search) == 0:
            url_template = (
                f"{BRAIN_API_BASE}/data-fields?"
                f"&instrumentType=EQUITY"
                f"&region={region}&delay={delay}&universe={universe}&dataset.id={dataset_id}&limit=50"
                "&offset={x}"
            )
            # Ask for the first page to learn the total record count.
            first_response = brain_session.get(url_template.format(x=0))
            first_response.raise_for_status()
            count = first_response.json()['count']
        else:
            url_template = (
                f"{BRAIN_API_BASE}/data-fields?"
                f"&instrumentType=EQUITY"
                f"&region={region}&delay={delay}&universe={universe}&limit=50"
                f"&search={search}"
                "&offset={x}"
            )
            count = 100  # Default for search queries

        # Fetch all data fields in batches of 50.
        datafields_list = []
        for x in range(0, count, 50):
            response = brain_session.get(url_template.format(x=x))
            response.raise_for_status()
            datafields_list.append(response.json()['results'])

        # Flatten the list of pages into one list of field records.
        datafields_list_flat = [item for sublist in datafields_list for item in sublist]

        # Keep only the fields the frontend needs.
        filtered_fields = [
            {
                'id': field['id'],
                'description': field['description'],
                'type': field['type'],
                'coverage': field.get('coverage', 0),
                'userCount': field.get('userCount', 0),
                'alphaCount': field.get('alphaCount', 0)
            }
            for field in datafields_list_flat
        ]

        return jsonify(filtered_fields)

    except Exception as e:
        return jsonify({'error': f'Failed to fetch data fields: {str(e)}'}), 500
|
+
@app.route('/api/dataset-description', methods=['GET'])
def get_dataset_description():
    """Get dataset description from BRAIN API.

    Looks up /data-sets/<dataset_id> for the requested region/delay/universe
    and returns its 'description' field.
    """
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        session_info = brain_sessions[session_id]
        brain_session = session_info['session']

        # Query parameters with defaults.
        region = request.args.get('region', 'USA')
        delay = request.args.get('delay', '1')
        universe = request.args.get('universe', 'TOP3000')
        dataset_id = request.args.get('dataset_id', 'analyst10')

        # Build URL for dataset description.
        # BUGFIX: the query string previously contained the mojibake "®ion="
        # (a corrupted "&region="), so the region parameter was never sent;
        # restored the intended "&region=" separator.
        url = (
            f"{BRAIN_API_BASE}/data-sets/{dataset_id}?"
            f"instrumentType=EQUITY&region={region}&delay={delay}&universe={universe}"
        )

        print(f"Getting dataset description from: {url}")

        # Make request to BRAIN API
        response = brain_session.get(url)
        response.raise_for_status()

        data = response.json()
        description = data.get('description', 'No description available')

        print(f"Dataset description retrieved: {description[:100]}...")

        return jsonify({
            'success': True,
            'description': description,
            'dataset_id': dataset_id
        })

    except Exception as e:
        print(f"Dataset description error: {str(e)}")
        return jsonify({'error': f'Failed to get dataset description: {str(e)}'}), 500
|
+
@app.route('/api/status', methods=['GET'])
def check_status():
    """Check if session is still valid"""
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'valid': False})

        info = brain_sessions[session_id]

        # Sessions expire 24 hours (86400 s) after creation; expired ones are purged.
        age = time.time() - info['timestamp']
        if age > 86400:
            del brain_sessions[session_id]
            return jsonify({'valid': False})

        # A session still awaiting biometric confirmation is not yet usable.
        if info.get('biometric_pending'):
            return jsonify({
                'valid': False,
                'biometric_pending': True,
                'username': info['username'],
                'message': 'Biometric authentication pending'
            })

        return jsonify({
            'valid': True,
            'username': info['username']
        })

    except Exception as e:
        return jsonify({'error': f'Status check failed: {str(e)}'}), 500
|
+
@app.route('/api/logout', methods=['POST'])
def logout():
    """Logout and clean up session"""
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')

        # Drop the server-side BRAIN session, if one is registered.
        if session_id and session_id in brain_sessions:
            del brain_sessions[session_id]

        # Clear the cookie-backed reference as well (no-op if absent).
        flask_session.pop('brain_session_id', None)

        return jsonify({'success': True, 'message': 'Logged out successfully'})

    except Exception as e:
        return jsonify({'error': f'Logout failed: {str(e)}'}), 500
|
+
@app.route('/api/test-expression', methods=['POST'])
def test_expression():
    """Test an expression using BRAIN API simulation.

    Fills in default simulation settings, posts to /simulations, follows the
    Location header to report RUNNING/terminal status, and wraps every failure
    mode in a JSON payload the frontend can display.
    """
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        session_info = brain_sessions[session_id]
        brain_session = session_info['session']

        # Get the simulation data from request
        simulation_data = request.get_json()

        # Ensure required top-level fields are present.
        if 'type' not in simulation_data:
            simulation_data['type'] = 'REGULAR'
        if 'settings' not in simulation_data:
            simulation_data['settings'] = {}

        # Fill in any missing settings with platform defaults.
        default_settings = {
            'instrumentType': 'EQUITY',
            'region': 'USA',
            'universe': 'TOP3000',
            'delay': 1,
            'decay': 15,
            'neutralization': 'SUBINDUSTRY',
            'truncation': 0.08,
            'pasteurization': 'ON',
            'testPeriod': 'P1Y6M',
            'unitHandling': 'VERIFY',
            'nanHandling': 'OFF',
            'language': 'FASTEXPR',
            'visualization': False
        }
        for key, value in default_settings.items():
            simulation_data['settings'].setdefault(key, value)

        # The client may send visualization as the strings "true"/"false".
        if isinstance(simulation_data['settings'].get('visualization'), str):
            viz_value = simulation_data['settings']['visualization'].lower()
            simulation_data['settings']['visualization'] = viz_value == 'true'

        # Send simulation request (following notebook pattern)
        try:
            simulation_response = brain_session.post(f'{BRAIN_API_BASE}/simulations', json=simulation_data)

            # A Location header means the simulation was accepted; poll it once.
            if 'Location' in simulation_response.headers:
                message = brain_session.get(simulation_response.headers['Location']).json()

                # 'progress' present => simulation still running.
                if 'progress' in message:
                    info_to_print = "Simulation is running"
                    return jsonify({
                        'success': True,
                        'status': 'RUNNING',
                        'message': info_to_print,
                        'full_response': message
                    })
                # Terminal state: pass the platform's status through.
                return jsonify({
                    'success': message.get('status') != 'ERROR',
                    'status': message.get('status', 'UNKNOWN'),
                    'message': str(message),
                    'full_response': message
                })

            # No Location header: try to surface the error body.
            try:
                message = simulation_response.json()
                return jsonify({
                    'success': False,
                    'status': 'ERROR',
                    'message': str(message),
                    'full_response': message
                })
            except Exception:
                # BUGFIX: was a bare `except:` which also swallowed
                # SystemExit/KeyboardInterrupt; narrowed to Exception.
                return jsonify({
                    'success': False,
                    'status': 'ERROR',
                    'message': 'web Connection Error',
                    'full_response': {}
                })

        except Exception as e:
            return jsonify({
                'success': False,
                'status': 'ERROR',
                'message': 'web Connection Error',
                'full_response': {'error': str(e)}
            })

    except Exception as e:
        import traceback
        return jsonify({
            'success': False,
            'status': 'ERROR',
            'message': f'Test expression failed: {str(e)}',
            'full_response': {'error': str(e), 'traceback': traceback.format_exc()}
        }), 500
|
+
@app.route('/api/test-operators', methods=['GET'])
def test_operators():
    """Test endpoint to check raw BRAIN operators API response"""
    try:
        session_id = request.headers.get('Session-ID') or flask_session.get('brain_session_id')
        if not session_id or session_id not in brain_sessions:
            return jsonify({'error': 'Invalid or expired session'}), 401

        brain_session = brain_sessions[session_id]['session']

        # Hit the operators endpoint and inspect the raw payload shape.
        raw = brain_session.get(f'{BRAIN_API_BASE}/operators')
        raw.raise_for_status()
        data = raw.json()

        is_list = isinstance(data, list)
        is_dict = isinstance(data, dict)

        if is_list:
            preview = data[:3]
        elif is_dict:
            preview = data.get('results', [])[:3]
        else:
            preview = None

        # Diagnostic summary of the payload for debugging the frontend.
        result = {
            'type': str(type(data)),
            'is_list': is_list,
            'is_dict': is_dict,
            'length': len(data) if is_list else None,
            'keys': list(data.keys()) if is_dict else None,
            'count_key': data.get('count') if is_dict else None,
            'first_few_items': preview
        }

        return jsonify(result)

    except Exception as e:
        return jsonify({'error': f'Test failed: {str(e)}'}), 500
|
+
# Import and register blueprints.
# BUGFIX: registration previously ran unconditionally after the try/except,
# so a failed import raised NameError on the undefined *_bp names instead of
# degrading gracefully as the "Some features may not be available." message
# promises. Registration now happens only when the import succeeded.
try:
    from blueprints import idea_house_bp, paper_analysis_bp, feature_engineering_bp, inspiration_house_bp
    print("📦 Blueprints imported successfully!")

    app.register_blueprint(idea_house_bp, url_prefix='/idea-house')
    app.register_blueprint(paper_analysis_bp, url_prefix='/paper-analysis')
    app.register_blueprint(feature_engineering_bp, url_prefix='/feature-engineering')
    app.register_blueprint(inspiration_house_bp, url_prefix='/inspiration-house')

    print("🔧 All blueprints registered successfully!")
    print(" - Idea House: /idea-house")
    print(" - Paper Analysis: /paper-analysis")
    print(" - Feature Engineering: /feature-engineering")
    print(" - Inspiration House: /inspiration-house")
except ImportError as e:
    print(f"❌ Failed to import blueprints: {e}")
    print("Some features may not be available.")

# Template Management Routes
# Custom templates are stored as JSON in a folder next to this script.
script_dir = os.path.dirname(os.path.abspath(__file__))
TEMPLATES_DIR = os.path.join(script_dir, 'custom_templates')

# Ensure templates directory exists
if not os.path.exists(TEMPLATES_DIR):
    os.makedirs(TEMPLATES_DIR)
    print(f"📁 Created templates directory: {TEMPLATES_DIR}")
else:
    print(f"📁 Templates directory ready: {TEMPLATES_DIR}")

print("✅ BRAIN Expression Template Decoder fully initialized!")
print("🎯 Ready to process templates and integrate with BRAIN API!")
|
+
@app.route('/api/templates', methods=['GET'])
def get_templates():
    """Get all custom templates"""
    try:
        templates_file = os.path.join(TEMPLATES_DIR, 'templates.json')

        # Missing file simply means no templates have been saved yet.
        if os.path.exists(templates_file):
            with open(templates_file, 'r', encoding='utf-8') as fh:
                stored = json.load(fh)
        else:
            stored = []

        return jsonify(stored)
    except Exception as e:
        return jsonify({'error': f'Error loading templates: {str(e)}'}), 500
|
+
@app.route('/api/templates', methods=['POST'])
def save_template():
    """Save a new custom template"""
    try:
        payload = request.get_json()
        name = payload.get('name', '').strip()
        description = payload.get('description', '').strip()
        expression = payload.get('expression', '').strip()
        template_configurations = payload.get('templateConfigurations', {})

        if not name or not expression:
            return jsonify({'error': 'Name and expression are required'}), 400

        # Load whatever templates already exist on disk.
        templates_file = os.path.join(TEMPLATES_DIR, 'templates.json')
        templates = []
        if os.path.exists(templates_file):
            with open(templates_file, 'r', encoding='utf-8') as fh:
                templates = json.load(fh)

        # A template with the same name is an update rather than an insert.
        existing_index = next((i for i, t in enumerate(templates) if t['name'] == name), None)

        new_template = {
            'name': name,
            'description': description,
            'expression': expression,
            'templateConfigurations': template_configurations,
            'createdAt': datetime.now().isoformat()
        }

        if existing_index is not None:
            # Preserve the original creation time and stamp the update time.
            previous = templates[existing_index]
            if 'createdAt' in previous:
                new_template['createdAt'] = previous['createdAt']
            new_template['updatedAt'] = datetime.now().isoformat()
            templates[existing_index] = new_template
            message = f'Template "{name}" updated successfully'
        else:
            templates.append(new_template)
            message = f'Template "{name}" saved successfully'

        # Persist the full list back to disk.
        with open(templates_file, 'w', encoding='utf-8') as fh:
            json.dump(templates, fh, indent=2, ensure_ascii=False)

        return jsonify({'success': True, 'message': message})

    except Exception as e:
        return jsonify({'error': f'Error saving template: {str(e)}'}), 500
|
+
@app.route('/api/templates/<int:template_id>', methods=['DELETE'])
def delete_template(template_id):
    """Delete a custom template"""
    try:
        templates_file = os.path.join(TEMPLATES_DIR, 'templates.json')
        templates = []
        if os.path.exists(templates_file):
            with open(templates_file, 'r', encoding='utf-8') as fh:
                templates = json.load(fh)

        # template_id is a positional index into the stored list.
        if not (0 <= template_id < len(templates)):
            return jsonify({'error': 'Template not found'}), 404

        removed = templates.pop(template_id)

        # Persist the shortened list back to disk.
        with open(templates_file, 'w', encoding='utf-8') as fh:
            json.dump(templates, fh, indent=2, ensure_ascii=False)

        return jsonify({'success': True, 'message': f'Template "{removed["name"]}" deleted successfully'})

    except Exception as e:
        return jsonify({'error': f'Error deleting template: {str(e)}'}), 500
|
+
@app.route('/api/templates/export', methods=['GET'])
def export_templates():
    """Export all templates as JSON"""
    try:
        templates_file = os.path.join(TEMPLATES_DIR, 'templates.json')

        # No file yet means there is nothing to export.
        if not os.path.exists(templates_file):
            return jsonify([])

        with open(templates_file, 'r', encoding='utf-8') as fh:
            return jsonify(json.load(fh))

    except Exception as e:
        return jsonify({'error': f'Error exporting templates: {str(e)}'}), 500
|
+
@app.route('/api/templates/import', methods=['POST'])
def import_templates():
    """Import templates from JSON"""
    try:
        payload = request.get_json()
        imported_templates = payload.get('templates', [])
        overwrite = payload.get('overwrite', False)

        if not isinstance(imported_templates, list):
            return jsonify({'error': 'Invalid template format'}), 400

        # Keep only entries that carry a non-blank name and expression.
        valid_templates = []
        for candidate in imported_templates:
            if not isinstance(candidate, dict):
                continue
            if 'name' not in candidate or 'expression' not in candidate:
                continue
            if not candidate['name'].strip() or not candidate['expression'].strip():
                continue
            valid_templates.append({
                'name': candidate['name'].strip(),
                'description': candidate.get('description', '').strip(),
                'expression': candidate['expression'].strip(),
                'templateConfigurations': candidate.get('templateConfigurations', {}),
                'createdAt': candidate.get('createdAt', datetime.now().isoformat())
            })

        if not valid_templates:
            return jsonify({'error': 'No valid templates found'}), 400

        # Load the templates already on disk.
        templates_file = os.path.join(TEMPLATES_DIR, 'templates.json')
        existing_templates = []
        if os.path.exists(templates_file):
            with open(templates_file, 'r', encoding='utf-8') as fh:
                existing_templates = json.load(fh)

        # Name collisions either overwrite in place or are skipped.
        duplicates = []
        new_templates = []
        for candidate in valid_templates:
            match_index = next(
                (i for i, t in enumerate(existing_templates) if t['name'] == candidate['name']),
                None)
            if match_index is not None:
                duplicates.append(candidate['name'])
                if overwrite:
                    existing_templates[match_index] = candidate
            else:
                new_templates.append(candidate)

        existing_templates.extend(new_templates)

        # Persist the merged list back to disk.
        with open(templates_file, 'w', encoding='utf-8') as fh:
            json.dump(existing_templates, fh, indent=2, ensure_ascii=False)

        return jsonify({
            'success': True,
            'imported': len(new_templates),
            'duplicates': duplicates,
            'overwritten': len(duplicates) if overwrite else 0
        })

    except Exception as e:
        return jsonify({'error': f'Error importing templates: {str(e)}'}), 500
|
+
@app.route('/api/run-simulator', methods=['POST'])
def run_simulator():
    """Run the simulator_wqb.py script.

    Launches the script in a new terminal window (Windows console or
    gnome-terminal) from a daemon thread and returns immediately.
    Removed an unused `from pathlib import Path` import.
    """
    try:
        import subprocess
        import threading

        # The script lives in the "simulator" folder next to this file.
        base_dir = os.path.dirname(os.path.abspath(__file__))
        simulator_dir = os.path.join(base_dir, 'simulator')
        simulator_path = os.path.join(simulator_dir, 'simulator_wqb.py')

        if not os.path.exists(simulator_path):
            return jsonify({'error': 'simulator_wqb.py not found in simulator folder'}), 404

        def run_script():
            """Open the script in a fresh terminal window for the current OS."""
            try:
                if os.name == 'nt':
                    # Windows: new console, working dir set to the simulator folder.
                    subprocess.Popen(['cmd', '/k', 'python', 'simulator_wqb.py'],
                                     cwd=simulator_dir,
                                     creationflags=subprocess.CREATE_NEW_CONSOLE)
                else:
                    # Unix-like: relies on gnome-terminal being installed.
                    subprocess.Popen(['gnome-terminal', '--working-directory', simulator_dir, '--', 'python3', 'simulator_wqb.py'])
            except Exception as e:
                print(f"Error running simulator: {e}")

        # Daemon thread so a hung launch can't block interpreter shutdown.
        thread = threading.Thread(target=run_script, daemon=True)
        thread.start()

        return jsonify({
            'success': True,
            'message': 'Simulator script started in new terminal window'
        })

    except Exception as e:
        return jsonify({'error': f'Failed to run simulator: {str(e)}'}), 500
|
+
@app.route('/api/open-submitter', methods=['POST'])
def open_submitter():
    """Run the alpha_submitter.py script.

    Launches the script in a new terminal window (Windows console or
    gnome-terminal) from a daemon thread and returns immediately.
    Removed an unused `from pathlib import Path` import.
    """
    try:
        import subprocess
        import threading

        # The script lives in the "simulator" folder next to this file.
        base_dir = os.path.dirname(os.path.abspath(__file__))
        simulator_dir = os.path.join(base_dir, 'simulator')
        submitter_path = os.path.join(simulator_dir, 'alpha_submitter.py')

        if not os.path.exists(submitter_path):
            return jsonify({'error': 'alpha_submitter.py not found in simulator folder'}), 404

        def run_script():
            """Open the script in a fresh terminal window for the current OS."""
            try:
                if os.name == 'nt':
                    # Windows: new console, working dir set to the simulator folder.
                    subprocess.Popen(['cmd', '/k', 'python', 'alpha_submitter.py'],
                                     cwd=simulator_dir,
                                     creationflags=subprocess.CREATE_NEW_CONSOLE)
                else:
                    # Unix-like: relies on gnome-terminal being installed.
                    subprocess.Popen(['gnome-terminal', '--working-directory', simulator_dir, '--', 'python3', 'alpha_submitter.py'])
            except Exception as e:
                print(f"Error running submitter: {e}")

        # Daemon thread so a hung launch can't block interpreter shutdown.
        thread = threading.Thread(target=run_script, daemon=True)
        thread.start()

        return jsonify({
            'success': True,
            'message': 'Alpha submitter script started in new terminal window'
        })

    except Exception as e:
        return jsonify({'error': f'Failed to open submitter: {str(e)}'}), 500
|
+
@app.route('/api/open-hk-simulator', methods=['POST'])
def open_hk_simulator():
    """Launch hkSimulator/autosimulator.py in a new terminal window.

    Responds immediately with a JSON payload; the script itself runs
    detached in its own console (Windows) or terminal (Unix).

    Returns:
        200 JSON ``{'success': True, 'message': ...}`` on launch,
        404 JSON error if the script file is missing,
        500 JSON error on any unexpected failure.
    """
    try:
        import subprocess
        import threading
        # NOTE: removed unused `from pathlib import Path` (never referenced).

        # Resolve the script relative to this file (hkSimulator subfolder).
        script_dir = os.path.dirname(os.path.abspath(__file__))
        hk_simulator_dir = os.path.join(script_dir, 'hkSimulator')
        autosimulator_path = os.path.join(hk_simulator_dir, 'autosimulator.py')

        # Fail fast with a clear error if the script is absent.
        if not os.path.exists(autosimulator_path):
            return jsonify({'error': 'autosimulator.py not found in hkSimulator folder'}), 404

        def run_script():
            """Spawn the script in a new terminal; errors are logged, not raised."""
            try:
                if os.name == 'nt':
                    # New console window on Windows; cwd set so the script's
                    # relative paths resolve inside the hkSimulator folder.
                    subprocess.Popen(['cmd', '/k', 'python', 'autosimulator.py'],
                                     cwd=hk_simulator_dir,
                                     creationflags=subprocess.CREATE_NEW_CONSOLE)
                else:
                    # NOTE(review): assumes gnome-terminal is installed; other
                    # Unix environments (xterm, macOS Terminal) are not handled.
                    subprocess.Popen(['gnome-terminal', '--working-directory', hk_simulator_dir, '--', 'python3', 'autosimulator.py'])
            except Exception as e:
                print(f"Error running HK simulator: {e}")

        # Popen is already non-blocking, but the daemon thread keeps the
        # request handler fully decoupled from any spawn latency.
        thread = threading.Thread(target=run_script)
        thread.daemon = True
        thread.start()

        return jsonify({
            'success': True,
            'message': 'HK simulator script started in new terminal window'
        })

    except Exception as e:
        return jsonify({'error': f'Failed to open HK simulator: {str(e)}'}), 500
|
|
1484
|
+
|
|
1485
|
+
if __name__ == '__main__':
    # Print the startup banner, then launch the Flask dev server on all
    # interfaces at port 5000.
    banner = (
        "Starting BRAIN Expression Template Decoder Web Application...",
        "Application will run on http://localhost:5000",
        "BRAIN API integration included - no separate proxy needed!",
    )
    for line in banner:
        print(line)
    app.run(debug=True, host='0.0.0.0', port=5000)
|