physicsworks-1.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- physicsworks-1.0.0.dist-info/METADATA +28 -0
- physicsworks-1.0.0.dist-info/RECORD +28 -0
- physicsworks-1.0.0.dist-info/WHEEL +5 -0
- physicsworks-1.0.0.dist-info/top_level.txt +1 -0
- physicsworks_python/__init__.py +1 -0
- physicsworks_python/events/Geometry.py +15 -0
- physicsworks_python/events/Mesh.py +6 -0
- physicsworks_python/events/__init__.py +11 -0
- physicsworks_python/greet.py +3 -0
- physicsworks_python/nats/__init__.py +0 -0
- physicsworks_python/nats/listener.py +34 -0
- physicsworks_python/nats/publisher.py +15 -0
- physicsworks_python/runner/__init__.py +44 -0
- physicsworks_python/runner/config.py +68 -0
- physicsworks_python/runner/core.py +357 -0
- physicsworks_python/runner/executor.py +606 -0
- physicsworks_python/runner/interface.py +39 -0
- physicsworks_python/runner/logger.py +37 -0
- physicsworks_python/runner/server.py +260 -0
- physicsworks_python/runner/template.py +402 -0
- physicsworks_python/runner/utils.py +234 -0
- physicsworks_python/runner/watcher.py +357 -0
- physicsworks_python/threejs.py +19 -0
- physicsworks_python/wrappers/MongoClientWrapper.py +29 -0
- physicsworks_python/wrappers/NatsClientWrapper.py +62 -0
- physicsworks_python/wrappers/SocketIOClientWrapper.py +23 -0
- physicsworks_python/wrappers/SocketIOServerWrapper.py +18 -0
- physicsworks_python/wrappers/__init__.py +0 -0
physicsworks_python/runner/server.py
@@ -0,0 +1,260 @@
+"""
+Server communication utilities for the runner package.
+"""
+
+import requests
+import json
+import os
+import base64
+import uuid
+import threading
+from typing import Dict, Optional, Any
+
+from .config import RuntimeAttributes
+from .logger import DebugLogger
+
+
+class ServerCommunicator:
+    """Handles all server communication"""
+
+    def __init__(self, runtime_attrs: RuntimeAttributes, logger: DebugLogger):
+        self.runtime_attrs = runtime_attrs
+        self.logger = logger
+        self.run_node = None
+        self.logs_node = None
+        self.solver_config = None
+        self.inputs = None
+        self.scripts = None
+        self.meshes = []
+        self.geometries = {}
+        self.node_lock = threading.Lock()  # Protect run_node modifications
+
+    def set_status(self, status: str, progress: int, status_label: str, extras: Optional[Dict] = None):
+        """Update status and communicate with server"""
+        with self.node_lock:
+            if (status != self.runtime_attrs.status or
+                    progress != self.runtime_attrs.progress or
+                    status_label != self.runtime_attrs.status_label):
+
+                self.runtime_attrs.status = status
+                self.runtime_attrs.progress = progress
+                self.runtime_attrs.status_label = status_label
+
+                self.logger.debug(f"Status: {status} ({progress}%) - {status_label}")
+
+                if self.run_node:
+                    self.run_node['status'] = status
+                    self.run_node['progress'] = progress
+                    self.run_node['statusLabel'] = status_label
+
+                    if status in ['error', 'finished']:
+                        self.run_node['actions'] = None
+
+                    try:
+                        config = getattr(self, 'config', {})
+                        if config and 'host' in config:
+                            requests.put(
+                                f"{config['host']}simulation/run/update/{config['simulation']}/{self.runtime_attrs.run_id}",
+                                json={'node': self.run_node, 'extras': extras},
+                                headers={'auth-token': config['token']}
+                            )
+                            # print(f"Run node updated: {self.run_node}")
+                    except Exception as e:
+                        self.logger.error(f"Failed to update server status: {e}")
+
+    def _sync_node_to_server(self):
+        """Explicitly sync run_node to server (called after node updates)"""
+        try:
+            config = getattr(self, 'config', {})
+            if config and 'host' in config and self.run_node:
+                requests.put(
+                    f"{config['host']}simulation/run/update/{config['simulation']}/{self.runtime_attrs.run_id}",
+                    json={'node': self.run_node},
+                    headers={'auth-token': config['token']}
+                )
+                self.logger.debug("Node tree synced to server")
+        except Exception as e:
+            self.logger.error(f"Failed to sync node to server: {e}")
+
+    def _update_logs_node(self):
+        """Update logs node with current log files"""
+        with self.node_lock:
+            self.logs_node = next((node for node in self.run_node.get('children', []) if node.get('slug') == 'logs'), None)
+            if not self.logs_node:
+                return
+
+            logs_children = []
+            for log_path in self.runtime_attrs.log_paths:
+                if log_path not in self.runtime_attrs.logs:
+                    filename = f"{uuid.uuid1()}.log"
+                    self.runtime_attrs.logs[log_path] = {"name": filename, "position": 0}
+                else:
+                    filename = self.runtime_attrs.logs[log_path]['name']
+
+                # Update file position and upload changes
+                config = getattr(self, 'config', {})
+                if config:
+                    metadata = {
+                        "project": config.get('project'),
+                        "owner": config.get('owner'),
+                        "originalname": os.path.basename(log_path),
+                        "resource": config.get('job'),
+                        "resourceKind": "Run",
+                        "simulation": config.get('simulation'),
+                    }
+                    self.runtime_attrs.logs[log_path]['position'] = self._add_to_file(
+                        log_path, self.runtime_attrs.logs[log_path]['position'], filename, metadata
+                    )
+
+                if os.path.exists(log_path):
+                    log_name = os.path.basename(log_path).split(".")[0]
+                    logs_children.append({
+                        'id': filename.split(".")[0],
+                        'name': log_name,
+                        'isLog': True,
+                        'resource': config.get('resource', {}),
+                        'simulation': config.get('simulation'),
+                        'status': self.runtime_attrs.logs_status.get(log_name, ''),
+                        'filename': filename
+                    })
+
+            self.logs_node['children'] = logs_children
+
+            # Sync to server immediately - lock ensures we send the latest status
+            self._sync_node_to_server()
+
+    def _update_plots_node(self):
+        """Update plots node with current plot files"""
+        with self.node_lock:
+            plots_node = next((node for node in self.run_node.get('children', []) if node.get('slug') == 'plots'), None)
+            if not plots_node:
+                return
+
+            plots_children = []
+            for plot_path in self.runtime_attrs.plots_paths:
+                if plot_path not in self.runtime_attrs.plots:
+                    filename = f"{uuid.uuid1()}.{plot_path.split('.')[-1]}"
+                    self.runtime_attrs.plots[plot_path] = {"name": filename, "position": 0}
+                else:
+                    filename = self.runtime_attrs.plots[plot_path]['name']
+
+                plot_name = os.path.basename(plot_path).split(".")[0]
+                plots_children.append({
+                    'id': str(uuid.uuid1()),
+                    'name': plot_name,
+                    'filename': filename,
+                    'simulationId': getattr(self, 'config', {}).get('simulation'),
+                    'isPlot': True,
+                })
+
+                # Update file position and upload changes
+                config = getattr(self, 'config', {})
+                if config:
+                    metadata = {
+                        "project": config.get('project'),
+                        "owner": config.get('owner'),
+                        "originalname": os.path.basename(plot_path),
+                        "resource": config.get('job'),
+                        "resourceKind": "Run",
+                        "simulation": config.get('simulation'),
+                    }
+                    self.runtime_attrs.plots[plot_path]['position'] = self._add_to_file(
+                        plot_path, self.runtime_attrs.plots[plot_path]['position'], filename, metadata
+                    )
+
+            plots_node['children'] = plots_children
+
+            # Sync to server immediately - lock ensures we send the latest status
+            self._sync_node_to_server()
+
+    def _update_media_node(self):
+        """Update media node with current media files"""
+        with self.node_lock:
+            media_node = next((node for node in self.run_node.get('children', []) if node.get('slug') == 'media'), None)
+            if not media_node:
+                return
+
+            media_children = []
+            for media_path in self.runtime_attrs.media_paths:
+                media_to_upload = self.runtime_attrs.media[media_path]
+                media_name = os.path.basename(media_path)
+                media_children.append({
+                    'id': str(uuid.uuid1()),
+                    'name': media_name,
+                    'filename': media_to_upload['name'],
+                    'simulation': getattr(self, 'config', {}).get('simulation'),
+                    'isMedia': True,
+                    'mediaType': media_to_upload['name'].split('.')[-1]
+                })
+
+                # Update file position and upload changes (binary mode for media)
+                config = getattr(self, 'config', {})
+                if config:
+                    metadata = {
+                        "project": config.get('project'),
+                        "owner": config.get('owner'),
+                        "originalname": os.path.basename(media_path),
+                        "resource": config.get('job'),
+                        "resourceKind": "Run",
+                        "simulation": config.get('simulation'),
+                    }
+                    media_to_upload['position'] = self._add_to_file(
+                        media_path, media_to_upload['position'], media_to_upload['name'], metadata, is_ascii=False
+                    )
+            media_node['children'] = media_children
+
+            # Sync to server immediately - lock ensures we send the latest status
+            self._sync_node_to_server()
+
+    def _add_to_file(self, path: str, position: int, filename: str, metadata: Dict[str, Any], is_ascii: bool = True) -> int:
+        """Add file changes to server (supports both ASCII and binary files)"""
+        try:
+            changes = self._read_file_changes(path, position, is_ascii)
+            if not changes['newContent']:
+                return position
+
+            # Handle encoding based on file type
+            if is_ascii:
+                chunk_data = base64.b64encode(changes['newContent'].encode('utf-8')).decode('utf-8')
+            else:
+                # Binary files are already bytes, encode directly
+                chunk_data = base64.b64encode(changes['newContent']).decode('utf-8')
+
+            payload = {
+                'filename': filename,
+                'chunk': chunk_data,
+                'isNew': position == 0,
+                'metadata': metadata,
+                'isAscii': is_ascii
+            }
+
+            self.runtime_attrs.current_uploads.append(filename)
+            config = getattr(self, 'config', {})
+            if config and 'host' in config:
+                response = requests.post(
+                    f"{config['host']}storage/append",
+                    data={'json': json.dumps(payload)},
+                    headers={'auth-token': config['token'], 'Content-Type': 'application/x-www-form-urlencoded'}
+                )
+
+            if filename in self.runtime_attrs.current_uploads:
+                self.runtime_attrs.current_uploads.remove(filename)
+            return changes['lastPosition']
+
+        except Exception as e:
+            self.logger.error(f"Error adding to file {filename}: {e}")
+            if filename in self.runtime_attrs.current_uploads:
+                self.runtime_attrs.current_uploads.remove(filename)
+            return position
+
+    def _read_file_changes(self, path: str, old_position: int, is_ascii: bool = True) -> Dict[str, Any]:
+        """Read file changes from a specific position (supports both ASCII and binary files)"""
+        try:
+            read_mode = 'r' if is_ascii else 'rb'
+            with open(path, read_mode) as file:
+                file.seek(old_position)
+                new_content = file.read()
+                new_position = file.tell()
+                return {'lastPosition': new_position, 'newContent': new_content}
+        except Exception:
+            return {'lastPosition': old_position, 'newContent': '' if is_ascii else b''}
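
The hunk above adds ServerCommunicator, whose upload path is incremental: _read_file_changes reads a tracked log, plot, or media file from a saved byte offset, and _add_to_file base64-encodes just that new tail and POSTs it to {host}storage/append, marking the chunk isNew when the offset is zero. The standalone sketch below is not part of the package; the file names, log content, and "example-uuid.log" are invented for illustration, and it only shows the position-tracking idea in isolation.

import base64
import json
import os
import tempfile

def read_changes(path: str, old_position: int, is_ascii: bool = True) -> dict:
    # Mirrors _read_file_changes: read only what was written after the saved offset.
    mode = 'r' if is_ascii else 'rb'
    try:
        with open(path, mode) as f:
            f.seek(old_position)
            new_content = f.read()
            return {'lastPosition': f.tell(), 'newContent': new_content}
    except OSError:
        return {'lastPosition': old_position, 'newContent': '' if is_ascii else b''}

# Hypothetical sync cycle over a growing solver log.
log_path = os.path.join(tempfile.mkdtemp(), "solver.log")
position = 0
with open(log_path, "w", encoding="utf-8") as f:
    f.write("iteration 1: residual 1e-3\n")

changes = read_changes(log_path, position)
if changes['newContent']:
    payload = {
        'filename': 'example-uuid.log',   # the package derives these names from uuid.uuid1()
        'chunk': base64.b64encode(changes['newContent'].encode('utf-8')).decode('utf-8'),
        'isNew': position == 0,           # first chunk asks the server to create the file
        'isAscii': True,
    }
    # ServerCommunicator would POST this (plus metadata) as form data to f"{config['host']}storage/append".
    print(json.dumps(payload))
    position = changes['lastPosition']    # the next sync sends only bytes written after this offset

Tracking a per-file offset keeps repeated syncs cheap: a long-running solver log is streamed to the server in small appended chunks rather than re-uploaded in full on every status update.
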
physicsworks_python/runner/template.py
@@ -0,0 +1,402 @@
+"""
+Template functions for PhysicsWorks solver scripts.
+
+This module provides reusable functions for solver templates including:
+- Progress tracking
+- 3-block execution architecture (pre_remote, remote, post_remote)
+"""
+
+import json
+import os
+import time
+from typing import Any, Callable, Dict, Optional
+
+from .interface import append_status, StepStatus
+
+
+def load_run_inputs(inputs_path: str = "") -> list[Dict[str, Any]]:
+    """
+    Load runInputs.json file and return the raw list.
+
+    Args:
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        List of input dictionaries from runInputs.json
+
+    Raises:
+        FileNotFoundError: If runInputs.json is not found
+        json.JSONDecodeError: If JSON is invalid
+    """
+    # Use inputs_path parameter if provided, otherwise fall back to environment variable
+    inputs_dir = inputs_path if inputs_path else os.getenv("PHYSICSWORKS_INPUTS_DIR")
+
+    if not inputs_dir:
+        raise ValueError("No inputs directory specified. Provide inputs_path or set PHYSICSWORKS_INPUTS_DIR environment variable")
+
+    run_inputs_file = os.path.join(inputs_dir, "runInputs.json")
+
+    with open(run_inputs_file, 'r', encoding='utf-8') as f:
+        return json.load(f)
+
+
+def reconstruct_tree_from_run_inputs(inputs_path: str = "") -> Dict[str, Any]:
+    """
+    Load and reconstruct nested dictionary tree from runInputs.json file.
+
+    Args:
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        Reconstructed tree dictionary where leaves are the values from run_inputs
+
+    Raises:
+        FileNotFoundError: If runInputs.json is not found
+        json.JSONDecodeError: If JSON is invalid
+    """
+    run_inputs = load_run_inputs(inputs_path)
+
+    tree = {}
+
+    for item in run_inputs:
+        full_path = item.get('fullPath', '')
+        value = item.get('value')
+
+        # Split the path by underscore, filtering out empty strings
+        # fullPath format: "_slug1_slug2_slug3_..."
+        path_parts = [part for part in full_path.split('_') if part]
+
+        if not path_parts:
+            continue
+
+        # Navigate/create the nested structure
+        current_level = tree
+
+        # Traverse through all parts except the last one
+        for part in path_parts[:-1]:
+            if part not in current_level:
+                current_level[part] = {}
+            current_level = current_level[part]
+
+        # Set the value at the final key
+        final_key = path_parts[-1]
+        current_level[final_key] = value
+
+    return tree
+
+
+def get_input_value_by_slug(slug: str, inputs_path: str = "") -> Any:
+    """
+    Get the value of the first item matching the given slug.
+
+    Args:
+        slug: The slug to search for
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        The value of the first matching item, or None if not found
+    """
+    run_inputs = load_run_inputs(inputs_path)
+
+    for item in run_inputs:
+        if item.get('slug') == slug:
+            return item.get('value')
+
+    return None
+
+
+def get_input_values_by_slug(slug: str, inputs_path: str = "") -> list[Dict[str, Any]]:
+    """
+    Get all items matching the given slug with their values and full paths.
+
+    Args:
+        slug: The slug to search for
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        List of dictionaries with 'value' and 'fullPath' keys for all matching items
+    """
+    run_inputs = load_run_inputs(inputs_path)
+
+    results = []
+    for item in run_inputs:
+        if item.get('slug') == slug:
+            results.append({
+                'value': item.get('value'),
+                'fullPath': item.get('fullPath')
+            })
+
+    return results
+
+
+def get_input_value_by_slugs(slugs: list[str], inputs_path: str = "") -> Dict[str, Any]:
+    """
+    Get values for multiple slugs as a dictionary.
+
+    Args:
+        slugs: List of slugs to search for
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        Dictionary mapping each slug to its first matching value
+    """
+    run_inputs = load_run_inputs(inputs_path)
+
+    result = {}
+    slug_set = set(slugs)
+
+    for item in run_inputs:
+        item_slug = item.get('slug')
+        if item_slug in slug_set and item_slug not in result:
+            result[item_slug] = item.get('value')
+
+    return result
+
+
+def get_input_values_by_slugs(slugs: list[str], inputs_path: str = "") -> Dict[str, Dict[str, Any]]:
+    """
+    Get objects (value and fullPath) for multiple slugs as a dictionary.
+
+    Args:
+        slugs: List of slugs to search for
+        inputs_path: Optional path to inputs directory (overrides PHYSICSWORKS_INPUTS_DIR env var)
+
+    Returns:
+        Dictionary mapping each slug to an object with 'value' and 'fullPath' keys
+    """
+    run_inputs = load_run_inputs(inputs_path)
+
+    result = {}
+    slug_set = set(slugs)
+
+    for item in run_inputs:
+        item_slug = item.get('slug')
+        if item_slug in slug_set and item_slug not in result:
+            result[item_slug] = {
+                'value': item.get('value'),
+                'fullPath': item.get('fullPath')
+            }
+
+    return result
+
+
+def update_solver_progress(progress: int, message: str = "Running solver", step: str = "solver", status: StepStatus = StepStatus.RUNNING, outputs_path: str = ""):
+    """
+    Update solver progress (0-100%) to status file.
+
+    Args:
+        progress: Progress percentage (0-100)
+        message: Status message describing current operation
+        step: Step name
+        status: Step status (StepStatus enum)
+        outputs_path: Optional path to outputs directory (overrides PHYSICSWORKS_OUTPUTS_DIR env var)
+    """
+
+    # Use outputs_path parameter if provided, otherwise fall back to environment variable
+    outputs_dir = outputs_path if outputs_path else os.getenv("PHYSICSWORKS_OUTPUTS_DIR")
+
+    if not outputs_dir:
+        return
+
+    # Scale solver progress from 0-100% to 10-90% overall progress
+    overall_progress = int(10 + (progress * 0.8))
+
+    append_status(outputs_dir, overall_progress, message, step, status)
+    print(f"Progress: {progress}% - {message}")
+
+
+def add_to_simulation_results(name: str, value: Any, outputs_path: str = ""):
+    """
+    Add a name-value pair to results.csv in the outputs directory.
+
+    Creates the file with headers if it doesn't exist, otherwise appends to it.
+
+    Args:
+        name: The name/label for the result
+        value: The value to record
+        outputs_path: Optional path to outputs directory (overrides PHYSICSWORKS_OUTPUTS_DIR env var)
+
+    Raises:
+        ValueError: If no outputs directory is specified
+    """
+    # Use outputs_path parameter if provided, otherwise fall back to environment variable
+    outputs_dir = outputs_path if outputs_path else os.getenv("PHYSICSWORKS_OUTPUTS_DIR")
+
+    if not outputs_dir:
+        raise ValueError("No outputs directory specified. Provide outputs_path or set PHYSICSWORKS_OUTPUTS_DIR environment variable")
+
+    results_file = os.path.join(outputs_dir, "results.csv")
+
+    # Check if file exists to determine if we need to write headers
+    file_exists = os.path.exists(results_file)
+
+    # Open in append mode
+    with open(results_file, 'a', encoding='utf-8') as f:
+        # Write header if file is new
+        if not file_exists:
+            f.write("name,value\n")
+
+        # Write the data row
+        f.write(f"{name},{value}\n")
+
+
+def replace_placeholders_in_file(file_path: str, replacements: Dict[str, Any]) -> None:
+    """
+    Replace placeholders in a file with actual values.
+
+    Args:
+        file_path: Path to the file to modify
+        replacements: Dictionary mapping placeholder strings to their replacement values
+
+    Example:
+        replace_placeholders_in_file(
+            "script.py",
+            {"{{ITERATIONS}}": 100, "{{STEP_SIZE}}": 0.01}
+        )
+    """
+    # Read the file content
+    with open(file_path, 'r', encoding='utf-8') as f:
+        content = f.read()
+
+    # Replace all placeholders
+    for placeholder, value in replacements.items():
+        content = content.replace(placeholder, str(value))
+
+    # Write back to file
+    with open(file_path, 'w', encoding='utf-8') as f:
+        f.write(content)
+
+
+def pre_remote(callback: Optional[Callable[[], None]] = None):
+    """
+    Pre-Remote Block: Setup and preparation tasks.
+
+    This block runs before the main computation and handles:
+    - Input file validation
+    - Parameter preprocessing
+    - Environment setup
+    - Data preparation
+
+    Args:
+        callback: Optional user-defined function to execute custom pre-remote code
+    """
+    print("=== Pre-Remote Phase ===")
+    update_solver_progress(0, "Starting pre-remote phase")
+
+    if callback:
+        print("Executing user-defined pre-remote code...")
+        callback()
+    else:
+        print("No custom pre-remote code provided")
+        time.sleep(0.5)  # Simulate work
+
+    update_solver_progress(10, "Pre-remote phase completed")
+    print("Pre-remote phase completed")
+
+
+def remote(callback: Optional[Callable[[], None]] = None):
+    """
+    Remote Block: Main computation/solver execution.
+
+    This is the core processing block that handles:
+    - Main solver execution
+    - Numerical computations
+    - Remote environment execution (Docker, Slurm, etc.)
+    - Heavy computational tasks
+
+    Args:
+        callback: Optional user-defined function to execute custom remote/solver code
+    """
+    print("=== Remote Phase ===")
+    update_solver_progress(15, "Starting remote phase")
+
+    if callback:
+        print("Executing user-defined remote code...")
+        callback()
+    else:
+        print("No custom remote code provided, running simulation...")
+        # Default simulation with progress updates
+        phases = [
+            ("Initializing computation", 20),
+            ("Setting up solver", 30),
+            ("Running main solver", 50),
+            ("Computing results", 70),
+            ("Finalizing computation", 85)
+        ]
+
+        for phase_name, progress in phases:
+            update_solver_progress(progress, phase_name)
+            time.sleep(0.5)  # Simulate work
+
+    update_solver_progress(90, "Remote phase completed")
+    print("Remote phase completed")
+
+
+def post_remote(callback: Optional[Callable[[], None]] = None):
+    """
+    Post-Remote Block: Post-processing and cleanup.
+
+    This block runs after main computation and handles:
+    - Result post-processing
+    - Output file generation
+    - Data cleanup
+    - Report generation
+
+    Args:
+        callback: Optional user-defined function to execute custom post-remote code
+    """
+    print("=== Post-Remote Phase ===")
+
+    if callback:
+        print("Executing user-defined post-remote code...")
+        callback()
+    else:
+        print("No custom post-remote code provided")
+        time.sleep(0.5)  # Simulate work
+
+    print("Post-remote phase completed")
+
+def run_solver(
+    pre_remote_fn: Optional[Callable[[], None]] = None,
+    remote_fn: Optional[Callable[[], None]] = None,
+    post_remote_fn: Optional[Callable[[], None]] = None
+) -> int:
+    """
+    Execute the 3-block solver architecture.
+
+    This is the main entry point that runs all three blocks sequentially:
+    1. Pre-Remote (setup and preparation)
+    2. Remote (main computation)
+    3. Post-Remote (post-processing and cleanup)
+
+    Args:
+        pre_remote_fn: Optional callback for pre-remote phase
+        remote_fn: Optional callback for remote phase
+        post_remote_fn: Optional callback for post-remote phase
+
+    Returns:
+        0 on success, 1 on failure
+    """
+    print("=== PhysicsWorks Solver Script ===")
+
+    try:
+        # Block 1: Pre-Remote
+        pre_remote(pre_remote_fn)
+
+        # Block 2: Remote (Main computation)
+        remote(remote_fn)
+
+        # Block 3: Post-Remote
+        post_remote(post_remote_fn)
+
+        print("=== All phases completed successfully ===")
+        return 0
+
+    except Exception as e:
+        print(f"Error in solver execution: {e}")
+        # Try to update status, but don't fail if environment variable is not set
+        try:
+            update_solver_progress(0, f"Error: {e}", status=StepStatus.FAILED)
+        except:
+            pass
+        return 1