westpa 2022.12__cp313-cp313-macosx_10_13_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of westpa might be problematic. Click here for more details.
- westpa/__init__.py +14 -0
- westpa/_version.py +21 -0
- westpa/analysis/__init__.py +5 -0
- westpa/analysis/core.py +746 -0
- westpa/analysis/statistics.py +27 -0
- westpa/analysis/trajectories.py +360 -0
- westpa/cli/__init__.py +0 -0
- westpa/cli/core/__init__.py +0 -0
- westpa/cli/core/w_fork.py +152 -0
- westpa/cli/core/w_init.py +230 -0
- westpa/cli/core/w_run.py +77 -0
- westpa/cli/core/w_states.py +212 -0
- westpa/cli/core/w_succ.py +99 -0
- westpa/cli/core/w_truncate.py +68 -0
- westpa/cli/tools/__init__.py +0 -0
- westpa/cli/tools/ploterr.py +506 -0
- westpa/cli/tools/plothist.py +706 -0
- westpa/cli/tools/w_assign.py +596 -0
- westpa/cli/tools/w_bins.py +166 -0
- westpa/cli/tools/w_crawl.py +119 -0
- westpa/cli/tools/w_direct.py +547 -0
- westpa/cli/tools/w_dumpsegs.py +94 -0
- westpa/cli/tools/w_eddist.py +506 -0
- westpa/cli/tools/w_fluxanl.py +376 -0
- westpa/cli/tools/w_ipa.py +833 -0
- westpa/cli/tools/w_kinavg.py +127 -0
- westpa/cli/tools/w_kinetics.py +96 -0
- westpa/cli/tools/w_multi_west.py +414 -0
- westpa/cli/tools/w_ntop.py +213 -0
- westpa/cli/tools/w_pdist.py +515 -0
- westpa/cli/tools/w_postanalysis_matrix.py +82 -0
- westpa/cli/tools/w_postanalysis_reweight.py +53 -0
- westpa/cli/tools/w_red.py +491 -0
- westpa/cli/tools/w_reweight.py +780 -0
- westpa/cli/tools/w_select.py +226 -0
- westpa/cli/tools/w_stateprobs.py +111 -0
- westpa/cli/tools/w_trace.py +599 -0
- westpa/core/__init__.py +0 -0
- westpa/core/_rc.py +673 -0
- westpa/core/binning/__init__.py +55 -0
- westpa/core/binning/_assign.cpython-313-darwin.so +0 -0
- westpa/core/binning/assign.py +455 -0
- westpa/core/binning/binless.py +96 -0
- westpa/core/binning/binless_driver.py +54 -0
- westpa/core/binning/binless_manager.py +190 -0
- westpa/core/binning/bins.py +47 -0
- westpa/core/binning/mab.py +506 -0
- westpa/core/binning/mab_driver.py +54 -0
- westpa/core/binning/mab_manager.py +198 -0
- westpa/core/data_manager.py +1694 -0
- westpa/core/extloader.py +74 -0
- westpa/core/h5io.py +995 -0
- westpa/core/kinetics/__init__.py +24 -0
- westpa/core/kinetics/_kinetics.cpython-313-darwin.so +0 -0
- westpa/core/kinetics/events.py +147 -0
- westpa/core/kinetics/matrates.py +156 -0
- westpa/core/kinetics/rate_averaging.py +266 -0
- westpa/core/progress.py +218 -0
- westpa/core/propagators/__init__.py +54 -0
- westpa/core/propagators/executable.py +719 -0
- westpa/core/reweight/__init__.py +14 -0
- westpa/core/reweight/_reweight.cpython-313-darwin.so +0 -0
- westpa/core/reweight/matrix.py +126 -0
- westpa/core/segment.py +119 -0
- westpa/core/sim_manager.py +835 -0
- westpa/core/states.py +359 -0
- westpa/core/systems.py +93 -0
- westpa/core/textio.py +74 -0
- westpa/core/trajectory.py +330 -0
- westpa/core/we_driver.py +910 -0
- westpa/core/wm_ops.py +43 -0
- westpa/core/yamlcfg.py +391 -0
- westpa/fasthist/__init__.py +34 -0
- westpa/fasthist/_fasthist.cpython-313-darwin.so +0 -0
- westpa/mclib/__init__.py +271 -0
- westpa/mclib/__main__.py +28 -0
- westpa/mclib/_mclib.cpython-313-darwin.so +0 -0
- westpa/oldtools/__init__.py +4 -0
- westpa/oldtools/aframe/__init__.py +35 -0
- westpa/oldtools/aframe/atool.py +75 -0
- westpa/oldtools/aframe/base_mixin.py +26 -0
- westpa/oldtools/aframe/binning.py +178 -0
- westpa/oldtools/aframe/data_reader.py +560 -0
- westpa/oldtools/aframe/iter_range.py +200 -0
- westpa/oldtools/aframe/kinetics.py +117 -0
- westpa/oldtools/aframe/mcbs.py +153 -0
- westpa/oldtools/aframe/output.py +39 -0
- westpa/oldtools/aframe/plotting.py +90 -0
- westpa/oldtools/aframe/trajwalker.py +126 -0
- westpa/oldtools/aframe/transitions.py +469 -0
- westpa/oldtools/cmds/__init__.py +0 -0
- westpa/oldtools/cmds/w_ttimes.py +361 -0
- westpa/oldtools/files.py +34 -0
- westpa/oldtools/miscfn.py +23 -0
- westpa/oldtools/stats/__init__.py +4 -0
- westpa/oldtools/stats/accumulator.py +35 -0
- westpa/oldtools/stats/edfs.py +129 -0
- westpa/oldtools/stats/mcbs.py +96 -0
- westpa/tools/__init__.py +33 -0
- westpa/tools/binning.py +472 -0
- westpa/tools/core.py +340 -0
- westpa/tools/data_reader.py +159 -0
- westpa/tools/dtypes.py +31 -0
- westpa/tools/iter_range.py +198 -0
- westpa/tools/kinetics_tool.py +340 -0
- westpa/tools/plot.py +283 -0
- westpa/tools/progress.py +17 -0
- westpa/tools/selected_segs.py +154 -0
- westpa/tools/wipi.py +751 -0
- westpa/trajtree/__init__.py +4 -0
- westpa/trajtree/_trajtree.cpython-313-darwin.so +0 -0
- westpa/trajtree/trajtree.py +117 -0
- westpa/westext/__init__.py +0 -0
- westpa/westext/adaptvoronoi/__init__.py +3 -0
- westpa/westext/adaptvoronoi/adaptVor_driver.py +214 -0
- westpa/westext/hamsm_restarting/__init__.py +3 -0
- westpa/westext/hamsm_restarting/example_overrides.py +35 -0
- westpa/westext/hamsm_restarting/restart_driver.py +1165 -0
- westpa/westext/stringmethod/__init__.py +11 -0
- westpa/westext/stringmethod/fourier_fitting.py +69 -0
- westpa/westext/stringmethod/string_driver.py +253 -0
- westpa/westext/stringmethod/string_method.py +306 -0
- westpa/westext/weed/BinCluster.py +180 -0
- westpa/westext/weed/ProbAdjustEquil.py +100 -0
- westpa/westext/weed/UncertMath.py +247 -0
- westpa/westext/weed/__init__.py +10 -0
- westpa/westext/weed/weed_driver.py +192 -0
- westpa/westext/wess/ProbAdjust.py +101 -0
- westpa/westext/wess/__init__.py +6 -0
- westpa/westext/wess/wess_driver.py +217 -0
- westpa/work_managers/__init__.py +57 -0
- westpa/work_managers/core.py +396 -0
- westpa/work_managers/environment.py +134 -0
- westpa/work_managers/mpi.py +318 -0
- westpa/work_managers/processes.py +187 -0
- westpa/work_managers/serial.py +28 -0
- westpa/work_managers/threads.py +79 -0
- westpa/work_managers/zeromq/__init__.py +20 -0
- westpa/work_managers/zeromq/core.py +641 -0
- westpa/work_managers/zeromq/node.py +131 -0
- westpa/work_managers/zeromq/work_manager.py +526 -0
- westpa/work_managers/zeromq/worker.py +320 -0
- westpa-2022.12.dist-info/AUTHORS +22 -0
- westpa-2022.12.dist-info/LICENSE +21 -0
- westpa-2022.12.dist-info/METADATA +193 -0
- westpa-2022.12.dist-info/RECORD +149 -0
- westpa-2022.12.dist-info/WHEEL +6 -0
- westpa-2022.12.dist-info/entry_points.txt +29 -0
- westpa-2022.12.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,719 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
import os
|
|
3
|
+
import shutil
|
|
4
|
+
import signal
|
|
5
|
+
import subprocess
|
|
6
|
+
import sys
|
|
7
|
+
import tempfile
|
|
8
|
+
import time
|
|
9
|
+
import tarfile
|
|
10
|
+
import pickle
|
|
11
|
+
from io import BytesIO
|
|
12
|
+
|
|
13
|
+
import numpy as np
|
|
14
|
+
from numpy.random import MT19937, Generator
|
|
15
|
+
|
|
16
|
+
import westpa
|
|
17
|
+
from westpa.core.extloader import get_object
|
|
18
|
+
from westpa.core.propagators import WESTPropagator
|
|
19
|
+
from westpa.core.states import BasisState, InitialState, return_state_type
|
|
20
|
+
from westpa.core.segment import Segment
|
|
21
|
+
from westpa.core.yamlcfg import check_bool
|
|
22
|
+
|
|
23
|
+
from westpa.core.trajectory import load_trajectory
|
|
24
|
+
from westpa.core.h5io import safe_extract
|
|
25
|
+
|
|
26
|
+
# Module-level logger for executable-propagator diagnostics.
log = logging.getLogger(__name__)

# Get a list of user-friendly signal names: maps signal number -> name
# (e.g. {2: 'SIGINT'}).  SIG_* handler constants (SIG_DFL, SIG_IGN, ...)
# are excluded since they are not signals.
SIGNAL_NAMES = {getattr(signal, name): name for name in dir(signal) if name.startswith('SIG') and not name.startswith('SIG_')}
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def pcoord_loader(fieldname, pcoord_return_filename, destobj, single_point):
    """Read progress coordinate data into the ``pcoord`` field on ``destobj``.

    An exception will be raised if the data is malformed. If ``single_point`` is true,
    then only one (N-dimensional) point will be read, otherwise system.pcoord_len points
    will be read.
    """

    sysdriver = westpa.rc.get_system_driver()

    assert fieldname == 'pcoord'

    coords = np.loadtxt(pcoord_return_filename, dtype=sysdriver.pcoord_dtype)

    if not single_point:
        expected_shape = (sysdriver.pcoord_len, sysdriver.pcoord_ndim)
        # loadtxt squeezes singleton dimensions; restore the 2-D layout
        if coords.ndim < 2:
            coords.shape = expected_shape
    else:
        expected_shape = (sysdriver.pcoord_ndim,)
        # A single scalar value comes back 0-dimensional; promote to 1-D
        if coords.ndim == 0:
            coords.shape = (1,)

    if coords.shape != expected_shape:
        raise ValueError(
            'progress coordinate data has incorrect shape {!r} [expected {!r}] Check pcoord.err or seg_logs for more information.'.format(
                coords.shape, expected_shape
            )
        )

    destobj.pcoord = coords
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def aux_data_loader(fieldname, data_filename, segment, single_point):
    """Default auxiliary-dataset loader.

    Read whitespace-delimited text data from ``data_filename`` with
    ``np.loadtxt`` and store it in ``segment.data[fieldname]``.

    Raises ``ValueError`` if the file contains no data.  ``single_point`` is
    accepted for loader-interface compatibility but is not used.
    """
    data = np.loadtxt(data_filename)
    # Validate before attaching to the segment, so a failed load does not
    # leave an empty array behind in segment.data (the original stored first
    # and raised afterwards).
    if data.nbytes == 0:
        raise ValueError('could not read any data for {}'.format(fieldname))
    segment.data[fieldname] = data
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def npy_data_loader(fieldname, coord_file, segment, single_point):
    '''Load a NumPy ``.npy`` file from ``coord_file`` into ``segment.data[fieldname]``.
    Raises ``ValueError`` when the file holds no data.  ``single_point`` is accepted
    for loader-interface compatibility but is not used.'''
    log.debug('using npy_data_loader')
    loaded = np.load(coord_file, allow_pickle=True)
    segment.data[fieldname] = loaded
    if loaded.nbytes == 0:
        raise ValueError('could not read any data for {}'.format(fieldname))
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def pickle_data_loader(fieldname, coord_file, segment, single_point):
    """Load a pickled object from ``coord_file`` into ``segment.data[fieldname]``.

    Raises ``ValueError`` if the unpickled object is a zero-byte array.
    ``single_point`` is accepted for loader-interface compatibility but unused.

    NOTE: ``pickle.load`` executes arbitrary code on malicious input; only use
    this loader on files produced by trusted propagator scripts.
    """
    log.debug('using pickle_data_loader')
    with open(coord_file, 'rb') as fo:
        data = pickle.load(fo)
    segment.data[fieldname] = data
    # Unlike np.load, pickle may return any object; objects without an
    # ``nbytes`` attribute (lists, dicts, ...) previously crashed with
    # AttributeError here.  Treat them as non-empty and only reject
    # zero-byte arrays.
    if getattr(data, 'nbytes', None) == 0:
        raise ValueError('could not read any data for {}'.format(fieldname))
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
def trajectory_loader(fieldname, coord_folder, segment, single_point):
    '''Load data from the trajectory return. ``coord_folder`` should be the path to a folder
    containing trajectory files. ``segment`` is the ``Segment`` object that the data is associated with.
    Please see ``load_trajectory`` for more details. ``single_point`` is not used by this loader.'''
    try:
        segment.data['iterh5/trajectory'] = load_trajectory(coord_folder)
    except Exception as e:
        # Best-effort: the HDF5 framework tolerates a missing trajectory return.
        log.warning('could not read any {} data for HDF5 Framework: {}'.format(fieldname, str(e)))
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def restart_loader(fieldname, restart_folder, segment, single_point):
    '''Load data from the restart return. The loader will tar all files in ``restart_folder``
    and store it in the per-iteration HDF5 file. ``segment`` is the ``Segment`` object that
    the data is associated with. ``single_point`` is not used by this loader.'''
    try:
        buffer = BytesIO()
        with tarfile.open(mode='w:gz', fileobj=buffer) as archive:
            archive.add(restart_folder, arcname='.')

        # Append a sentinel byte ("tail protection") that is stripped again
        # by restart_writer before extraction.
        segment.data['iterh5/restart'] = buffer.getvalue() + b'\x01'
    except Exception as e:
        # Best-effort: a missing restart return only produces a warning.
        log.warning('could not read any {} data for HDF5 Framework: {}'.format(fieldname, str(e)))
    finally:
        buffer.close()
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def restart_writer(path, segment):
    '''Prepare the necessary files from the per-iteration HDF5 file to run ``segment``.

    Pops the ``iterh5/restart`` tarball from ``segment.data``, strips the trailing
    sentinel byte added by ``restart_loader``, and extracts the archive into
    ``path``.  Missing restart data is tolerated with a warning (expected for
    start states in iteration 1).
    '''
    # Bug fix: the original unconditionally ran ``d.close()`` in ``finally``;
    # a non-ValueError exception raised before ``d`` was bound (e.g. from
    # ``segment.data.pop``) turned into a confusing NameError.  Initialize a
    # sentinel and guard the close instead.
    d = None
    try:
        restart = segment.data.pop('iterh5/restart', None)
        # Making an exception for start states in iteration 1
        if restart is None:
            raise ValueError('restart data is not present')

        d = BytesIO(restart[:-1])  # remove tail protection
        with tarfile.open(fileobj=d, mode='r:gz') as t:
            safe_extract(t, path=path)

    except ValueError as e:
        log.warning('could not write HDF5 Framework restart data for {}: {}'.format(str(segment), str(e)))
        if segment.n_iter == 1:
            log.warning(
                'In iteration 1. Assuming this is a start state and proceeding to skip reading restart from per-iteration HDF5 file for {}'.format(
                    str(segment)
                )
            )
    except Exception as e:
        log.warning('could not write HDF5 Framework restart data for {}: {}'.format(str(segment), str(e)))
    finally:
        if d is not None:
            d.close()
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def seglog_loader(fieldname, log_file, segment, single_point):
    '''Load data from the log return. The loader will tar all files in ``log_file``
    and store it in the per-iteration HDF5 file. ``segment`` is the ``Segment`` object that
    the data is associated with. ``single_point`` is not used by this loader.'''
    try:
        buf = BytesIO()
        with tarfile.open(mode='w:gz', fileobj=buf) as archive:
            archive.add(log_file, arcname='.')

        # Append a sentinel byte ("tail protection") to the stored blob.
        segment.data['iterh5/log'] = buf.getvalue() + b'\x01'
    except Exception as e:
        # Best-effort: a missing log return only produces a warning.
        log.warning('could not read any data for {}: {}'.format(fieldname, str(e)))
    finally:
        buf.close()
|
|
155
|
+
|
|
156
|
+
|
|
157
|
+
# Dictionary with all the possible loaders
|
|
158
|
+
# Registry mapping the loader names accepted in west.cfg dataset
# specifications to their callables.  Multiple aliases point at the same
# function for backward compatibility; 'default' is the fallback used when
# no loader is named.
data_loaders = {
    'default': aux_data_loader,
    'auxdata_loader': aux_data_loader,
    'aux_data_loader': aux_data_loader,
    'npy_loader': npy_data_loader,
    'npy_data_loader': npy_data_loader,
    'pickle_loader': pickle_data_loader,
    'pickle_data_loader': pickle_data_loader,
}
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
class ExecutablePropagator(WESTPropagator):
|
|
170
|
+
ENV_CURRENT_ITER = 'WEST_CURRENT_ITER'
|
|
171
|
+
|
|
172
|
+
# Environment variables set during propagation
|
|
173
|
+
ENV_CURRENT_SEG_ID = 'WEST_CURRENT_SEG_ID'
|
|
174
|
+
ENV_CURRENT_SEG_DATA_REF = 'WEST_CURRENT_SEG_DATA_REF'
|
|
175
|
+
ENV_CURRENT_SEG_INITPOINT = 'WEST_CURRENT_SEG_INITPOINT_TYPE'
|
|
176
|
+
ENV_PARENT_SEG_ID = 'WEST_PARENT_ID'
|
|
177
|
+
ENV_PARENT_DATA_REF = 'WEST_PARENT_DATA_REF'
|
|
178
|
+
|
|
179
|
+
# Environment variables set during propagation and state generation
|
|
180
|
+
ENV_BSTATE_ID = 'WEST_BSTATE_ID'
|
|
181
|
+
ENV_BSTATE_DATA_REF = 'WEST_BSTATE_DATA_REF'
|
|
182
|
+
ENV_ISTATE_ID = 'WEST_ISTATE_ID'
|
|
183
|
+
ENV_ISTATE_DATA_REF = 'WEST_ISTATE_DATA_REF'
|
|
184
|
+
|
|
185
|
+
# Environment variables for progress coordinate calculation
|
|
186
|
+
ENV_STRUCT_DATA_REF = 'WEST_STRUCT_DATA_REF'
|
|
187
|
+
|
|
188
|
+
ENV_RAND16 = 'WEST_RAND16'
|
|
189
|
+
ENV_RAND32 = 'WEST_RAND32'
|
|
190
|
+
ENV_RAND64 = 'WEST_RAND64'
|
|
191
|
+
ENV_RAND128 = 'WEST_RAND128'
|
|
192
|
+
ENV_RANDFLOAT = 'WEST_RANDFLOAT'
|
|
193
|
+
|
|
194
|
+
    def __init__(self, rc=None):
        """Configure the executable propagator from the run-control configuration.

        Reads ``west.executable.*`` and ``west.data.data_refs.*`` from
        ``self.rc.config``, building ``self.exe_info`` (per-child-process
        execution settings) and ``self.data_info`` (per-dataset return/loader
        settings).

        Parameters
        ----------
        rc : optional
            Run-control object passed through to ``WESTPropagator``.

        Raises (via ``config.require``) if any mandatory configuration key is
        missing.
        """
        super().__init__(rc)

        # A mapping of environment variables to template strings which will be
        # added to the environment of all children launched.
        self.addtl_child_environ = dict()

        # A mapping of executable name ('propagator', 'pre_iteration', 'post_iteration') to
        # a dictionary of attributes like 'executable', 'stdout', 'stderr', 'environ', etc.
        self.exe_info = {}
        self.exe_info['propagator'] = {}
        self.exe_info['pre_iteration'] = {}
        self.exe_info['post_iteration'] = {}
        self.exe_info['get_pcoord'] = {}
        self.exe_info['gen_istate'] = {}

        # A mapping of data set name ('pcoord', 'coord', 'com', etc) to a dictionary of
        # attributes like 'loader', 'dtype', etc
        self.data_info = {}
        self.data_info['pcoord'] = {}

        # Validate configuration: these keys must exist for propagation to work.
        config = self.rc.config

        for key in [
            ('west', 'executable', 'propagator', 'executable'),
            ('west', 'data', 'data_refs', 'segment'),
            ('west', 'data', 'data_refs', 'basis_state'),
            ('west', 'data', 'data_refs', 'initial_state'),
        ]:
            config.require(key)

        # Template strings expanded by makepath() with segment/state objects.
        self.segment_ref_template = config['west', 'data', 'data_refs', 'segment']
        self.basis_state_ref_template = config['west', 'data', 'data_refs', 'basis_state']
        self.initial_state_ref_template = config['west', 'data', 'data_refs', 'initial_state']
        # HDF5-framework returns (trajectory/restart/log) are enabled only when
        # a per-iteration data ref is configured.
        store_h5 = config.get(['west', 'data', 'data_refs', 'iteration']) is not None

        # Create a persistent RNG for each worker
        self.rng = Generator(MT19937())

        # Load additional environment variables for all child processes
        self.addtl_child_environ.update({k: str(v) for k, v in (config['west', 'executable', 'environ'] or {}).items()})

        # Load configuration items relating to child processes
        for child_type in ('propagator', 'pre_iteration', 'post_iteration', 'get_pcoord', 'gen_istate', 'subgroup_walkers'):
            child_info = config.get(['west', 'executable', child_type])
            if not child_info:
                continue

            info_prefix = ['west', 'executable', child_type]

            # require executable to be specified if anything is specified at all
            config.require(info_prefix + ['executable'])

            self.exe_info[child_type]['executable'] = child_info['executable']
            self.exe_info[child_type]['stdin'] = child_info.get('stdin', os.devnull)
            self.exe_info[child_type]['stdout'] = child_info.get('stdout', None)
            self.exe_info[child_type]['stderr'] = child_info.get('stderr', None)
            self.exe_info[child_type]['cwd'] = child_info.get('cwd', None)

            if child_type not in ('propagator', 'get_pcoord', 'gen_istate'):
                self.exe_info[child_type]['enabled'] = child_info.get('enabled', True)
            else:
                # for consistency, propagator, get_pcoord, and gen_istate can never be disabled
                self.exe_info[child_type]['enabled'] = True

            # apply environment modifications specific to this executable
            self.exe_info[child_type]['environ'] = {k: str(v) for k, v in (child_info.get('environ') or {}).items()}

        log.debug('exe_info: {!r}'.format(self.exe_info))

        # Load configuration items relating to dataset input
        self.data_info['pcoord'] = {'name': 'pcoord', 'loader': pcoord_loader, 'enabled': True, 'filename': None, 'dir': False}
        self.data_info['trajectory'] = {
            'name': 'trajectory',
            'loader': trajectory_loader,
            'enabled': store_h5,
            'filename': None,
            'dir': True,
        }
        self.data_info['restart'] = {
            'name': 'restart',
            'loader': restart_loader,
            'enabled': store_h5,
            'filename': None,
            'dir': True,
        }
        self.data_info['log'] = {'name': 'seglog', 'loader': seglog_loader, 'enabled': store_h5, 'filename': None, 'dir': False}

        # Grab config from west.executable.datasets, else fallback to west.data.datasets.
        dataset_configs = config.get(["west", "executable", "datasets"]) or config.get(['west', 'data', 'datasets'], {})
        for dsinfo in dataset_configs:
            try:
                dsname = dsinfo['name']
            except KeyError:
                raise ValueError('dataset specifications require a ``name`` field')

            if dsname == 'pcoord':
                # can never disable pcoord collection
                dsinfo['enabled'] = True
            else:
                check_bool(dsinfo.setdefault('enabled', True))

            # Resolve the configured loader: a callable is used as-is; a known
            # alias resolves through data_loaders for ordinary datasets;
            # otherwise the directive is treated as a dotted import path.
            loader_directive = dsinfo.get('loader', None)
            if callable(loader_directive):
                loader = loader_directive
            elif loader_directive in data_loaders.keys():
                if dsname not in ['pcoord', 'seglog', 'restart', 'trajectory']:
                    loader = data_loaders[loader_directive]
                else:
                    # NOTE(review): for reserved dataset names this passes a
                    # data_loaders alias (e.g. 'npy_loader') to get_object(),
                    # which looks like it expects a dotted path — verify intent.
                    loader = get_object(loader_directive)
            elif dsname not in ['pcoord', 'seglog', 'restart', 'trajectory']:
                loader = aux_data_loader
            else:
                # YOLO. Or maybe it wasn't specified.
                loader = loader_directive

            if loader:
                dsinfo['loader'] = loader
            self.data_info.setdefault(dsname, {}).update(dsinfo)

        log.debug('data_info: {!r}'.format(self.data_info))
|
|
316
|
+
|
|
317
|
+
@staticmethod
|
|
318
|
+
def makepath(template, template_args=None, expanduser=True, expandvars=True, abspath=False, realpath=False):
|
|
319
|
+
template_args = template_args or {}
|
|
320
|
+
path = template.format(**template_args)
|
|
321
|
+
if expandvars:
|
|
322
|
+
path = os.path.expandvars(path)
|
|
323
|
+
if expanduser:
|
|
324
|
+
path = os.path.expanduser(path)
|
|
325
|
+
if realpath:
|
|
326
|
+
path = os.path.realpath(path)
|
|
327
|
+
if abspath:
|
|
328
|
+
path = os.path.abspath(path)
|
|
329
|
+
path = os.path.normpath(path)
|
|
330
|
+
return path
|
|
331
|
+
|
|
332
|
+
def random_val_env_vars(self):
|
|
333
|
+
'''Return a set of environment variables containing random seeds. These are returned
|
|
334
|
+
as a dictionary, suitable for use in ``os.environ.update()`` or as the ``env`` argument to
|
|
335
|
+
``subprocess.Popen()``. Every child process executed by ``exec_child()`` gets these.'''
|
|
336
|
+
|
|
337
|
+
return {
|
|
338
|
+
self.ENV_RAND16: str(self.rng.integers(2**16, dtype=np.uint16)),
|
|
339
|
+
self.ENV_RAND32: str(self.rng.integers(2**32, dtype=np.uint32)),
|
|
340
|
+
self.ENV_RAND64: str(self.rng.integers(2**64, dtype=np.uint64)),
|
|
341
|
+
self.ENV_RAND128: str(int(self.rng.integers(2**64, dtype=np.uint64)) + int(self.rng.integers(2**64, dtype=np.uint64))),
|
|
342
|
+
self.ENV_RANDFLOAT: str(self.rng.random()),
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
def exec_child(self, executable, environ=None, stdin=None, stdout=None, stderr=None, cwd=None):
|
|
346
|
+
'''Execute a child process with the environment set from the current environment, the
|
|
347
|
+
values of self.addtl_child_environ, the random numbers returned by self.random_val_env_vars, and
|
|
348
|
+
the given ``environ`` (applied in that order). stdin/stdout/stderr are optionally redirected.
|
|
349
|
+
|
|
350
|
+
This function waits on the child process to finish, then returns
|
|
351
|
+
(rc, rusage), where rc is the child's return code and rusage is the resource usage tuple from os.wait4()'''
|
|
352
|
+
|
|
353
|
+
all_environ = dict(os.environ)
|
|
354
|
+
all_environ.update(self.addtl_child_environ)
|
|
355
|
+
all_environ.update(self.random_val_env_vars())
|
|
356
|
+
all_environ.update(environ or {})
|
|
357
|
+
|
|
358
|
+
stdin = open(stdin, 'rb') if stdin else sys.stdin
|
|
359
|
+
stdout = open(stdout, 'wb') if stdout else sys.stdout
|
|
360
|
+
if stderr == 'stdout':
|
|
361
|
+
stderr = stdout
|
|
362
|
+
else:
|
|
363
|
+
stderr = open(stderr, 'wb') if stderr else sys.stderr
|
|
364
|
+
|
|
365
|
+
# close_fds is critical for preventing out-of-file errors
|
|
366
|
+
proc = subprocess.Popen(
|
|
367
|
+
[executable],
|
|
368
|
+
cwd=cwd,
|
|
369
|
+
stdin=stdin,
|
|
370
|
+
stdout=stdout,
|
|
371
|
+
stderr=stderr if stderr != stdout else subprocess.STDOUT,
|
|
372
|
+
close_fds=True,
|
|
373
|
+
env=all_environ,
|
|
374
|
+
)
|
|
375
|
+
|
|
376
|
+
# Wait on child and get resource usage
|
|
377
|
+
(_pid, _status, rusage) = os.wait4(proc.pid, 0)
|
|
378
|
+
# Do a subprocess.Popen.wait() to let the Popen instance (and subprocess module) know that
|
|
379
|
+
# we are done with the process, and to get a more friendly return code
|
|
380
|
+
rc = proc.wait()
|
|
381
|
+
|
|
382
|
+
return (rc, rusage)
|
|
383
|
+
|
|
384
|
+
def exec_child_from_child_info(self, child_info, template_args, environ):
|
|
385
|
+
for key, value in child_info.get('environ', {}).items():
|
|
386
|
+
environ[key] = self.makepath(value)
|
|
387
|
+
return self.exec_child(
|
|
388
|
+
executable=self.makepath(child_info['executable'], template_args),
|
|
389
|
+
environ=environ,
|
|
390
|
+
cwd=self.makepath(child_info['cwd'], template_args) if child_info['cwd'] else None,
|
|
391
|
+
stdin=self.makepath(child_info['stdin'], template_args) if child_info['stdin'] else os.devnull,
|
|
392
|
+
stdout=self.makepath(child_info['stdout'], template_args) if child_info['stdout'] else None,
|
|
393
|
+
stderr=self.makepath(child_info['stderr'], template_args) if child_info['stderr'] else None,
|
|
394
|
+
)
|
|
395
|
+
|
|
396
|
+
# Functions to create template arguments and environment values for child processes
|
|
397
|
+
def update_args_env_basis_state(self, template_args, environ, basis_state):
|
|
398
|
+
new_template_args = {'basis_state': basis_state}
|
|
399
|
+
new_env = {
|
|
400
|
+
self.ENV_BSTATE_ID: str(basis_state.state_id if basis_state.state_id is not None else -1),
|
|
401
|
+
self.ENV_BSTATE_DATA_REF: self.makepath(self.basis_state_ref_template, new_template_args),
|
|
402
|
+
}
|
|
403
|
+
template_args.update(new_template_args)
|
|
404
|
+
environ.update(new_env)
|
|
405
|
+
return template_args, environ
|
|
406
|
+
|
|
407
|
+
def update_args_env_initial_state(self, template_args, environ, initial_state):
|
|
408
|
+
new_template_args = {'initial_state': initial_state}
|
|
409
|
+
new_env = {
|
|
410
|
+
self.ENV_ISTATE_ID: str(initial_state.state_id if initial_state.state_id is not None else -1),
|
|
411
|
+
self.ENV_ISTATE_DATA_REF: self.makepath(self.initial_state_ref_template, new_template_args),
|
|
412
|
+
}
|
|
413
|
+
|
|
414
|
+
if initial_state.basis_state is not None:
|
|
415
|
+
basis_state = initial_state.basis_state
|
|
416
|
+
elif initial_state.istate_type == InitialState.ISTATE_TYPE_START:
|
|
417
|
+
basis_state = BasisState(
|
|
418
|
+
label=f"sstate_{initial_state.state_id}", pcoord=initial_state.pcoord, probability=0.0, auxref=""
|
|
419
|
+
)
|
|
420
|
+
else:
|
|
421
|
+
basis_state = self.basis_states[initial_state.basis_state_id]
|
|
422
|
+
|
|
423
|
+
self.update_args_env_basis_state(new_template_args, new_env, basis_state)
|
|
424
|
+
|
|
425
|
+
template_args.update(new_template_args)
|
|
426
|
+
environ.update(new_env)
|
|
427
|
+
return template_args, environ
|
|
428
|
+
|
|
429
|
+
def update_args_env_iter(self, template_args, environ, n_iter):
|
|
430
|
+
environ[self.ENV_CURRENT_ITER] = str(n_iter if n_iter is not None else -1)
|
|
431
|
+
template_args['n_iter'] = int(n_iter)
|
|
432
|
+
return template_args, n_iter
|
|
433
|
+
|
|
434
|
+
    def update_args_env_segment(self, template_args, environ, segment):
        """Add segment-specific template arguments and WEST_* environment variables
        for ``segment``, including parent/basis/initial-state references that
        depend on how the segment was initiated.  Mutates and returns
        ``(template_args, environ)``.
        """
        template_args['segment'] = segment

        environ[self.ENV_CURRENT_SEG_INITPOINT] = Segment.initpoint_type_names[segment.initpoint_type]

        if segment.initpoint_type == Segment.SEG_INITPOINT_CONTINUES:
            # Could use actual parent object here if the work manager cared to pass that much data
            # to us (we'd need at least the subset of parents for all segments sent in the call to propagate)
            # that may make a good west.cfg option for future crazy extensibility, but for now,
            # just populate the bare minimum
            parent = Segment(n_iter=segment.n_iter - 1, seg_id=segment.parent_id)
            parent_template_args = dict(template_args)
            parent_template_args['segment'] = parent

            environ[self.ENV_PARENT_SEG_ID] = str(segment.parent_id if segment.parent_id is not None else -1)
            environ[self.ENV_PARENT_DATA_REF] = self.makepath(self.segment_ref_template, parent_template_args)
        elif segment.initpoint_type == Segment.SEG_INITPOINT_NEWTRAJ:
            # This segment is initiated from a basis state; WEST_PARENT_SEG_ID and WEST_PARENT_DATA_REF are
            # set to the basis state ID and data ref
            initial_state = self.initial_states[segment.initial_state_id]

            # Start states have no stored basis state; synthesize a
            # zero-probability placeholder so data refs can be expanded.
            if initial_state.istate_type == InitialState.ISTATE_TYPE_START:
                basis_state = BasisState(
                    label=f"sstate_{initial_state.state_id}", pcoord=initial_state.pcoord, probability=0.0, auxref=""
                )

            else:
                basis_state = self.basis_states[initial_state.basis_state_id]

            # Only populate bstate/istate variables if a caller has not
            # already done so (e.g. via update_args_env_initial_state).
            if self.ENV_BSTATE_ID not in environ:
                self.update_args_env_basis_state(template_args, environ, basis_state)
            if self.ENV_ISTATE_ID not in environ:
                self.update_args_env_initial_state(template_args, environ, initial_state)

            assert initial_state.istate_type in (
                InitialState.ISTATE_TYPE_BASIS,
                InitialState.ISTATE_TYPE_GENERATED,
                InitialState.ISTATE_TYPE_START,
            )
            if initial_state.istate_type == InitialState.ISTATE_TYPE_BASIS:
                environ[self.ENV_PARENT_DATA_REF] = environ[self.ENV_BSTATE_DATA_REF]

            elif initial_state.istate_type == InitialState.ISTATE_TYPE_START:
                # This points to the start-state PDB
                environ[self.ENV_PARENT_DATA_REF] = environ[self.ENV_BSTATE_DATA_REF] + '/' + initial_state.basis_auxref

            else:  # initial_state.type == InitialState.ISTATE_TYPE_GENERATED
                environ[self.ENV_PARENT_DATA_REF] = environ[self.ENV_ISTATE_DATA_REF]

        environ[self.ENV_CURRENT_SEG_ID] = str(segment.seg_id if segment.seg_id is not None else -1)
        environ[self.ENV_CURRENT_SEG_DATA_REF] = self.makepath(self.segment_ref_template, template_args)
        return template_args, environ
|
|
485
|
+
|
|
486
|
+
def template_args_for_segment(self, segment):
|
|
487
|
+
template_args, environ = {}, {}
|
|
488
|
+
self.update_args_env_iter(template_args, environ, segment.n_iter)
|
|
489
|
+
self.update_args_env_segment(template_args, environ, segment)
|
|
490
|
+
return template_args
|
|
491
|
+
|
|
492
|
+
def exec_for_segment(self, child_info, segment, addtl_env=None):
|
|
493
|
+
'''Execute a child process with environment and template expansion from the given
|
|
494
|
+
segment.'''
|
|
495
|
+
template_args, environ = {}, {}
|
|
496
|
+
self.update_args_env_iter(template_args, environ, segment.n_iter)
|
|
497
|
+
self.update_args_env_segment(template_args, environ, segment)
|
|
498
|
+
environ.update(addtl_env or {})
|
|
499
|
+
self.prepare_file_system(segment, environ)
|
|
500
|
+
child_info['cwd'] = environ[self.ENV_CURRENT_SEG_DATA_REF]
|
|
501
|
+
return self.exec_child_from_child_info(child_info, template_args, environ)
|
|
502
|
+
|
|
503
|
+
def exec_for_iteration(self, child_info, n_iter, addtl_env=None):
|
|
504
|
+
'''Execute a child process with environment and template expansion from the given
|
|
505
|
+
iteration number.'''
|
|
506
|
+
template_args, environ = {}, {}
|
|
507
|
+
self.update_args_env_iter(template_args, environ, n_iter)
|
|
508
|
+
environ.update(addtl_env or {})
|
|
509
|
+
return self.exec_child_from_child_info(child_info, template_args, environ)
|
|
510
|
+
|
|
511
|
+
def exec_for_basis_state(self, child_info, basis_state, addtl_env=None):
    '''Execute a child process with environment and template expansion from the
    given basis state'''
    args = {}
    env = {}
    self.update_args_env_basis_state(args, env, basis_state)
    if addtl_env:
        env.update(addtl_env)
    return self.exec_child_from_child_info(child_info, args, env)
|
|
518
|
+
|
|
519
|
+
def exec_for_initial_state(self, child_info, initial_state, addtl_env=None):
    '''Execute a child process with environment and template expansion from the given
    initial state.'''
    args = {}
    env = {}
    self.update_args_env_initial_state(args, env, initial_state)
    if addtl_env:
        env.update(addtl_env)
    return self.exec_child_from_child_info(child_info, args, env)
|
|
526
|
+
|
|
527
|
+
def prepare_file_system(self, segment, environ):
    '''Create a clean scratch directory for ``segment`` at the path given by
    the segment-data-ref environment variable, then write restart data into
    it if restart data return is enabled.

    If something already exists at that path (leftovers from a previous,
    failed run), it is removed and the directory recreated so the segment
    always starts from a clean state.'''
    seg_dir = environ[self.ENV_CURRENT_SEG_DATA_REF]
    try:
        # Normal case: the filesystem is properly clean.
        os.makedirs(seg_dir)
    except FileExistsError:
        # Stale data from a previous run: wipe it and recreate.
        # Only FileExistsError is caught here -- other OSErrors (e.g.
        # permission problems, missing parent) propagate directly rather
        # than surfacing as a confusing secondary rmtree failure, as the
        # previous bare ``except Exception`` caused.
        shutil.rmtree(seg_dir)
        os.makedirs(seg_dir)
    if self.data_info['restart']['enabled']:
        restart_writer(seg_dir, segment=segment)
|
|
537
|
+
|
|
538
|
+
def setup_dataset_return(self, segment=None, subset_keys=None):
    '''Set up temporary files and environment variables that point to them for segment
    runners to return data. ``segment`` is the ``Segment`` object that the return data
    is associated with. ``subset_keys`` specifies the names of a subset of data to be
    returned.'''
    wanted = self.data_info.keys() if subset_keys is None else subset_keys

    addtl_env = {}
    return_files = {}
    del_return_files = {}

    for name, info in self.data_info.items():
        # Skip datasets not requested or not enabled for return.
        if name not in wanted or not info.get('enabled', False):
            continue

        filename_template = info.get('filename')
        if filename_template:
            # Fixed destination derived from the segment's template args;
            # the file is kept after retrieval.
            if segment is None:
                raise ValueError('segment needs to be provided for dataset return')
            return_files[name] = self.makepath(filename_template, self.template_args_for_segment(segment))
            del_return_files[name] = False
        else:
            # No explicit destination: stage through a temporary file or
            # directory that is deleted once the data has been retrieved.
            if info.get('dir', False):
                tmp_name = tempfile.mkdtemp()
            else:
                fd, tmp_name = tempfile.mkstemp()
                os.close(fd)
            return_files[name] = tmp_name
            del_return_files[name] = True

        addtl_env['WEST_{}_RETURN'.format(name.upper())] = return_files[name]

    return addtl_env, return_files, del_return_files
|
|
576
|
+
|
|
577
|
+
def retrieve_dataset_return(self, state, return_files, del_return_files, single_point):
    '''Retrieve returned data from the temporary locations directed by the environment variables.
    ``state`` is a ``Segment``, ``BasisState``, or ``InitialState`` object that the return data is
    associated with. ``return_files`` is a ``dict`` where the keys are the dataset names and
    the values are the paths to the temporary files that contain the returned data.
    ``del_return_files`` is a ``dict`` where the keys are the names of datasets to be deleted
    (if the corresponding value is set to ``True``) once the data is retrieved.'''

    # Human-readable name and identifier for log messages.
    state_name, state_id = return_state_type(state)

    for dataset in self.data_info:
        # Only datasets that were actually staged by setup_dataset_return().
        if dataset not in return_files:
            continue

        # pcoord is always enabled (see __init__)
        if not self.data_info[dataset].get('enabled', False):
            continue

        filename = return_files[dataset]
        loader = self.data_info[dataset]['loader']
        try:
            # The loader parses the staged file and stores its contents on
            # ``state`` (e.g. as pcoord/auxdata attributes).
            loader(dataset, filename, state, single_point=single_point)
        except Exception as e:
            log.error('could not read {} for {} {} from {!r}: {!r}'.format(dataset, state_name, state_id, filename, e))
            if isinstance(state, Segment):
                state.status = state.SEG_STATUS_FAILED
            # One unreadable dataset fails the whole retrieval; remaining
            # datasets are left untouched (and their temp files undeleted).
            break
        else:
            # Successful load: clean up the staging file/directory if it was
            # flagged as temporary.
            if del_return_files.get(dataset, False):
                try:
                    if os.path.isfile(filename):
                        os.unlink(filename)
                    else:
                        # mkdtemp-staged datasets are directories.
                        shutil.rmtree(filename)
                except Exception as e:
                    # Best-effort cleanup: a leftover temp file is not fatal.
                    log.warning(
                        'could not delete {} file {!r} for {} {}: {!r}'.format(dataset, filename, state_name, state_id, e)
                    )
                else:
                    log.debug('deleted {} file {!r} for {} {}'.format(dataset, filename, state_name, state_id))
|
|
617
|
+
|
|
618
|
+
# Specific functions required by the WEST framework
def get_pcoord(self, state):
    '''Get the progress coordinate of the given basis or initial state.'''

    args, env = {}, {}

    # Select the appropriate execution helper and structure reference
    # depending on the kind of state we were handed.
    if isinstance(state, BasisState):
        self.update_args_env_basis_state(args, env, state)
        struct_ref = env[self.ENV_BSTATE_DATA_REF]
        execfn = self.exec_for_basis_state
    elif isinstance(state, InitialState):
        self.update_args_env_initial_state(args, env, state)
        struct_ref = env[self.ENV_ISTATE_DATA_REF]
        execfn = self.exec_for_initial_state
    else:
        raise TypeError('state must be a BasisState or InitialState')

    addtl_env, return_files, del_return_files = self.setup_dataset_return(
        subset_keys=['pcoord', 'trajectory', 'restart', 'log']
    )
    addtl_env[self.ENV_STRUCT_DATA_REF] = struct_ref

    child_info = self.exe_info.get('get_pcoord')
    rc, rusage = execfn(child_info, state, addtl_env)
    if rc != 0:
        log.error('get_pcoord executable {!r} returned {}'.format(child_info['executable'], rc))

    # Load whatever the child wrote back (single-point data).
    self.retrieve_dataset_return(state, return_files, del_return_files, True)
|
|
646
|
+
|
|
647
|
+
def gen_istate(self, basis_state, initial_state):
    '''Generate a new initial state from the given basis state.'''
    child_info = self.exe_info.get('gen_istate')
    rc, rusage = self.exec_for_initial_state(child_info, initial_state)
    if rc != 0:
        log.error('gen_istate executable {!r} returned {}'.format(child_info['executable'], rc))
        initial_state.istate_status = InitialState.ISTATE_STATUS_FAILED
        return

    # Determine and load the progress coordinate value for this state
    try:
        self.get_pcoord(initial_state)
    except Exception:
        log.exception('could not get progress coordinate for initial state {!r}'.format(initial_state))
        initial_state.istate_status = InitialState.ISTATE_STATUS_FAILED
        raise
    # No exception: the state is fully prepared.
    initial_state.istate_status = InitialState.ISTATE_STATUS_PREPARED
|
|
665
|
+
|
|
666
|
+
def prepare_iteration(self, n_iter, segments):
    '''Run the optional pre-iteration executable for iteration ``n_iter``.
    Failures are logged as warnings and never abort the iteration.'''
    child_info = self.exe_info.get('pre_iteration')
    if not (child_info and child_info['enabled']):
        return
    try:
        rc, rusage = self.exec_for_iteration(child_info, n_iter)
    except OSError as e:
        log.warning('could not execute pre-iteration program {!r}: {}'.format(child_info['executable'], e))
        return
    if rc != 0:
        log.warning('pre-iteration executable {!r} returned {}'.format(child_info['executable'], rc))
|
|
676
|
+
|
|
677
|
+
def finalize_iteration(self, n_iter, segments):
    '''Run the optional post-iteration executable for iteration ``n_iter``.
    Failures are logged as warnings and never abort the run.'''
    child_info = self.exe_info.get('post_iteration')
    if not (child_info and child_info['enabled']):
        return
    try:
        rc, rusage = self.exec_for_iteration(child_info, n_iter)
    except OSError as e:
        log.warning('could not execute post-iteration program {!r}: {}'.format(child_info['executable'], e))
        return
    if rc != 0:
        log.warning('post-iteration executable {!r} returned {}'.format(child_info['executable'], rc))
|
|
687
|
+
|
|
688
|
+
def propagate(self, segments):
    '''Run the propagator executable for each segment in ``segments``,
    recording status, returned data, and timing on each segment.'''
    child_info = self.exe_info['propagator']

    for segment in segments:
        starttime = time.time()

        addtl_env, return_files, del_return_files = self.setup_dataset_return(segment)

        # Spawn propagator and wait for its completion
        rc, rusage = self.exec_for_segment(child_info, segment, addtl_env)

        if rc < 0:
            # Negative return code: the child was killed by a signal.
            log.error('child process for segment %d exited on signal %d (%s)' % (segment.seg_id, -rc, SIGNAL_NAMES[-rc]))
            segment.status = Segment.SEG_STATUS_FAILED
            continue
        if rc > 0:
            log.error('child process for segment %d exited with code %d' % (segment.seg_id, rc))
            segment.status = Segment.SEG_STATUS_FAILED
            continue
        segment.status = Segment.SEG_STATUS_COMPLETE

        # Extract data and store on segment for recording in the master thread/process/node
        self.retrieve_dataset_return(segment, return_files, del_return_files, False)

        # retrieve_dataset_return may have marked the segment failed if a
        # dataset could not be read back.
        if segment.status == Segment.SEG_STATUS_FAILED:
            continue

        # Record timing info
        segment.walltime = time.time() - starttime
        segment.cputime = rusage.ru_utime
    return segments
|