westpa 2022.10__cp312-cp312-macosx_11_0_arm64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of westpa might be problematic. Click here for more details.
- westpa/__init__.py +14 -0
- westpa/_version.py +21 -0
- westpa/analysis/__init__.py +5 -0
- westpa/analysis/core.py +746 -0
- westpa/analysis/statistics.py +27 -0
- westpa/analysis/trajectories.py +360 -0
- westpa/cli/__init__.py +0 -0
- westpa/cli/core/__init__.py +0 -0
- westpa/cli/core/w_fork.py +152 -0
- westpa/cli/core/w_init.py +230 -0
- westpa/cli/core/w_run.py +77 -0
- westpa/cli/core/w_states.py +212 -0
- westpa/cli/core/w_succ.py +99 -0
- westpa/cli/core/w_truncate.py +59 -0
- westpa/cli/tools/__init__.py +0 -0
- westpa/cli/tools/ploterr.py +506 -0
- westpa/cli/tools/plothist.py +706 -0
- westpa/cli/tools/w_assign.py +596 -0
- westpa/cli/tools/w_bins.py +166 -0
- westpa/cli/tools/w_crawl.py +119 -0
- westpa/cli/tools/w_direct.py +547 -0
- westpa/cli/tools/w_dumpsegs.py +94 -0
- westpa/cli/tools/w_eddist.py +506 -0
- westpa/cli/tools/w_fluxanl.py +378 -0
- westpa/cli/tools/w_ipa.py +833 -0
- westpa/cli/tools/w_kinavg.py +127 -0
- westpa/cli/tools/w_kinetics.py +96 -0
- westpa/cli/tools/w_multi_west.py +414 -0
- westpa/cli/tools/w_ntop.py +213 -0
- westpa/cli/tools/w_pdist.py +515 -0
- westpa/cli/tools/w_postanalysis_matrix.py +82 -0
- westpa/cli/tools/w_postanalysis_reweight.py +53 -0
- westpa/cli/tools/w_red.py +486 -0
- westpa/cli/tools/w_reweight.py +780 -0
- westpa/cli/tools/w_select.py +226 -0
- westpa/cli/tools/w_stateprobs.py +111 -0
- westpa/cli/tools/w_trace.py +599 -0
- westpa/core/__init__.py +0 -0
- westpa/core/_rc.py +673 -0
- westpa/core/binning/__init__.py +55 -0
- westpa/core/binning/_assign.cpython-312-darwin.so +0 -0
- westpa/core/binning/assign.py +449 -0
- westpa/core/binning/binless.py +96 -0
- westpa/core/binning/binless_driver.py +54 -0
- westpa/core/binning/binless_manager.py +190 -0
- westpa/core/binning/bins.py +47 -0
- westpa/core/binning/mab.py +427 -0
- westpa/core/binning/mab_driver.py +54 -0
- westpa/core/binning/mab_manager.py +198 -0
- westpa/core/data_manager.py +1694 -0
- westpa/core/extloader.py +74 -0
- westpa/core/h5io.py +995 -0
- westpa/core/kinetics/__init__.py +24 -0
- westpa/core/kinetics/_kinetics.cpython-312-darwin.so +0 -0
- westpa/core/kinetics/events.py +147 -0
- westpa/core/kinetics/matrates.py +156 -0
- westpa/core/kinetics/rate_averaging.py +266 -0
- westpa/core/progress.py +218 -0
- westpa/core/propagators/__init__.py +54 -0
- westpa/core/propagators/executable.py +715 -0
- westpa/core/reweight/__init__.py +14 -0
- westpa/core/reweight/_reweight.cpython-312-darwin.so +0 -0
- westpa/core/reweight/matrix.py +126 -0
- westpa/core/segment.py +119 -0
- westpa/core/sim_manager.py +830 -0
- westpa/core/states.py +359 -0
- westpa/core/systems.py +93 -0
- westpa/core/textio.py +74 -0
- westpa/core/trajectory.py +330 -0
- westpa/core/we_driver.py +908 -0
- westpa/core/wm_ops.py +43 -0
- westpa/core/yamlcfg.py +391 -0
- westpa/fasthist/__init__.py +34 -0
- westpa/fasthist/__main__.py +110 -0
- westpa/fasthist/_fasthist.cpython-312-darwin.so +0 -0
- westpa/mclib/__init__.py +264 -0
- westpa/mclib/__main__.py +28 -0
- westpa/mclib/_mclib.cpython-312-darwin.so +0 -0
- westpa/oldtools/__init__.py +4 -0
- westpa/oldtools/aframe/__init__.py +35 -0
- westpa/oldtools/aframe/atool.py +75 -0
- westpa/oldtools/aframe/base_mixin.py +26 -0
- westpa/oldtools/aframe/binning.py +178 -0
- westpa/oldtools/aframe/data_reader.py +560 -0
- westpa/oldtools/aframe/iter_range.py +200 -0
- westpa/oldtools/aframe/kinetics.py +117 -0
- westpa/oldtools/aframe/mcbs.py +146 -0
- westpa/oldtools/aframe/output.py +39 -0
- westpa/oldtools/aframe/plotting.py +90 -0
- westpa/oldtools/aframe/trajwalker.py +126 -0
- westpa/oldtools/aframe/transitions.py +469 -0
- westpa/oldtools/cmds/__init__.py +0 -0
- westpa/oldtools/cmds/w_ttimes.py +358 -0
- westpa/oldtools/files.py +34 -0
- westpa/oldtools/miscfn.py +23 -0
- westpa/oldtools/stats/__init__.py +4 -0
- westpa/oldtools/stats/accumulator.py +35 -0
- westpa/oldtools/stats/edfs.py +129 -0
- westpa/oldtools/stats/mcbs.py +89 -0
- westpa/tools/__init__.py +33 -0
- westpa/tools/binning.py +472 -0
- westpa/tools/core.py +340 -0
- westpa/tools/data_reader.py +159 -0
- westpa/tools/dtypes.py +31 -0
- westpa/tools/iter_range.py +198 -0
- westpa/tools/kinetics_tool.py +340 -0
- westpa/tools/plot.py +283 -0
- westpa/tools/progress.py +17 -0
- westpa/tools/selected_segs.py +154 -0
- westpa/tools/wipi.py +751 -0
- westpa/trajtree/__init__.py +4 -0
- westpa/trajtree/_trajtree.cpython-312-darwin.so +0 -0
- westpa/trajtree/trajtree.py +117 -0
- westpa/westext/__init__.py +0 -0
- westpa/westext/adaptvoronoi/__init__.py +3 -0
- westpa/westext/adaptvoronoi/adaptVor_driver.py +214 -0
- westpa/westext/hamsm_restarting/__init__.py +3 -0
- westpa/westext/hamsm_restarting/example_overrides.py +35 -0
- westpa/westext/hamsm_restarting/restart_driver.py +1165 -0
- westpa/westext/stringmethod/__init__.py +11 -0
- westpa/westext/stringmethod/fourier_fitting.py +69 -0
- westpa/westext/stringmethod/string_driver.py +253 -0
- westpa/westext/stringmethod/string_method.py +306 -0
- westpa/westext/weed/BinCluster.py +180 -0
- westpa/westext/weed/ProbAdjustEquil.py +100 -0
- westpa/westext/weed/UncertMath.py +247 -0
- westpa/westext/weed/__init__.py +10 -0
- westpa/westext/weed/weed_driver.py +182 -0
- westpa/westext/wess/ProbAdjust.py +101 -0
- westpa/westext/wess/__init__.py +6 -0
- westpa/westext/wess/wess_driver.py +207 -0
- westpa/work_managers/__init__.py +57 -0
- westpa/work_managers/core.py +396 -0
- westpa/work_managers/environment.py +134 -0
- westpa/work_managers/mpi.py +318 -0
- westpa/work_managers/processes.py +187 -0
- westpa/work_managers/serial.py +28 -0
- westpa/work_managers/threads.py +79 -0
- westpa/work_managers/zeromq/__init__.py +20 -0
- westpa/work_managers/zeromq/core.py +641 -0
- westpa/work_managers/zeromq/node.py +131 -0
- westpa/work_managers/zeromq/work_manager.py +526 -0
- westpa/work_managers/zeromq/worker.py +320 -0
- westpa-2022.10.dist-info/AUTHORS +22 -0
- westpa-2022.10.dist-info/LICENSE +21 -0
- westpa-2022.10.dist-info/METADATA +183 -0
- westpa-2022.10.dist-info/RECORD +150 -0
- westpa-2022.10.dist-info/WHEEL +5 -0
- westpa-2022.10.dist-info/entry_points.txt +29 -0
- westpa-2022.10.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
log = logging.getLogger(__name__)
|
|
4
|
+
|
|
5
|
+
import numpy
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class TrajWalker:
    """A class to perform analysis by walking the trajectory tree. A stack is used rather than recursion, or else
    the highest number of iterations capable of being considered would be the same as the Python recursion limit.
    """

    def __init__(self, data_reader, history_chunksize=100):
        # data_reader is duck-typed: it must provide get_segments_by_id(),
        # get_children(), get_seg_ids(), and get_created_seg_ids()
        self.data_reader = data_reader
        # Growth increment (in slots) for the history array used by trace_trajectories()
        self.history_chunksize = history_chunksize
        # Running count of segments visited across all walks (never reset by this class)
        self.n_segs_visited = 0

    # TrajTree.count_segs_in_range() is now DataReader.total_segs_in_range()

    def trace_to_root(self, n_iter, seg_id):
        '''Trace the given segment back to its starting point, returning a list of Segment
        objects describing the entire trajectory.'''

        segments = []
        segment = self.data_reader.get_segments_by_id(n_iter, [seg_id])[0]
        segments.append(segment)
        # A negative p_parent_id marks a trajectory start, so stop there
        while segment.p_parent_id >= 0:
            segment = self.data_reader.get_segments_by_id(segment.n_iter - 1, [segment.p_parent_id])[0]
            segments.append(segment)
        # Collected child-to-parent; reverse into chronological order
        return list(reversed(segments))

    def get_trajectory_roots(self, first_iter, last_iter, include_pcoords=True):
        '''Get segments which start new trajectories within [first_iter, last_iter].'''

        roots = []
        for n_iter in range(first_iter, last_iter + 1):
            # Segments "created" in n_iter are those beginning new trajectories there
            seg_ids = self.data_reader.get_created_seg_ids(n_iter)
            segments = self.data_reader.get_segments_by_id(n_iter, seg_ids, include_pcoords=include_pcoords)
            roots.extend(segments)
        return roots

    def get_initial_nodes(self, first_iter, last_iter, include_pcoords=True):
        '''Get segments with which to begin a tree walk -- those alive or created within [first_iter,last_iter].'''

        # Maps n_iter -> set of seg_ids to use as walk roots
        root_ids = dict()

        # All trajectories alive or newly created in first_iter are initial nodes
        root_ids[first_iter] = set(self.data_reader.get_seg_ids(first_iter))

        # Find trajectories created in [first_iter, last_iter]
        for n_iter in range(first_iter, last_iter + 1):
            seg_ids = self.data_reader.get_created_seg_ids(n_iter)
            try:
                root_ids[n_iter].update(seg_ids)
            except KeyError:
                # First roots seen for this iteration
                root_ids[n_iter] = set(seg_ids)

        # Convert to Segment objects
        segments = []
        for n_iter, id_set in root_ids.items():
            segments.extend(self.data_reader.get_segments_by_id(n_iter, id_set, include_pcoords=include_pcoords))
        return segments

    def trace_trajectories(
        self, first_iter, last_iter, callable, include_pcoords=True, cargs=None, ckwargs=None, get_state=None, set_state=None
    ):
        """
        Walk the trajectory tree depth-first, calling
        ``callable(segment, children, history, *cargs, **ckwargs)`` for each segment
        visited. ``segment`` is the segment being visited, ``children`` is that
        segment's children, ``history`` is the chain of segments leading
        to ``segment`` (not including ``segment``). get_state and set_state are
        used to record and reset, respectively, any state specific to
        ``callable`` when a new branch is traversed.

        Note: the parameter name ``callable`` shadows the builtin; it is kept
        as-is because it is part of this method's public keyword interface.
        """

        cargs = cargs or tuple()
        ckwargs = ckwargs or dict()

        # Either both or neither of external state getter/setter required
        if (get_state or set_state) and not (get_state and set_state):
            raise ValueError('either both or neither of get_state/set_state must be specified')

        # This will grow to contain the maximum trajectory length
        history = numpy.empty((self.history_chunksize,), numpy.object_)
        roots = self.get_initial_nodes(first_iter, last_iter, include_pcoords)

        for root in roots:
            children = self.data_reader.get_children(root, include_pcoords)

            # Visit the root node of each tree unconditionally
            callable(root, children, [], *cargs, **ckwargs)
            self.n_segs_visited += 1

            # Each stack entry captures a branch point: the node, its remaining
            # unvisited children, the history length at that point, and any
            # external (callable-specific) state to restore when backtracking
            state_stack = [{'node': root, 'children': children, 'len_history': 0, 'ext': get_state() if get_state else None}]

            # Walk the tree, depth-first
            while state_stack:
                state = state_stack.pop(-1)

                node = state['node']
                children = state['children']
                len_history = state['len_history']
                if set_state:
                    set_state(state['ext'])

                # Descend as far as we can
                while node.n_iter < last_iter and len(children):
                    # Save current state before descending
                    state_stack.append(
                        {'node': node, 'children': children, 'len_history': len_history, 'ext': get_state() if get_state else None}
                    )

                    # Add an item to the historical record
                    if len_history >= history.shape[0]:
                        # NOTE(review): ndarray.resize raises if outside references
                        # to `history` exist; this assumes `callable` does not retain
                        # the history slices it is passed -- confirm with callers
                        history.resize((history.shape[0] + self.history_chunksize,))
                    history[len_history] = node
                    len_history += 1

                    node = children.pop(-1)
                    children = self.data_reader.get_children(node, include_pcoords)

                    # Visit the new node as we descend
                    callable(node, children, history[:len_history], *cargs, **ckwargs)
                    self.n_segs_visited += 1
|
|
@@ -0,0 +1,469 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
import numpy as np
|
|
4
|
+
|
|
5
|
+
import westpa
|
|
6
|
+
from westpa.oldtools.aframe import AnalysisMixin
|
|
7
|
+
from westpa.oldtools.aframe.trajwalker import TrajWalker
|
|
8
|
+
|
|
9
|
+
log = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class TransitionEventAccumulator:
    """Accumulate bin-to-bin transition events along a single trajectory.

    Bin assignments are fed in via :meth:`start_accumulation` (new trajectory)
    and :meth:`continue_accumulation` (same trajectory, next segment).
    Completed transition events are buffered and written to a ``'transitions'``
    dataset in ``output_group``, and per-(initial,final)-bin event counts are
    tallied in ``self.n_trans`` (shape ``(n_bins, n_bins)``).
    """

    index_dtype = np.uintp
    count_dtype = np.uint64
    weight_dtype = np.float64
    output_tdat_chunksize = 4096  # HDF5 chunksize for transition data (~300 KiB)
    tdat_buffersize = 524288  # Internal buffer length (~38 MiB)
    max_acc = 32768  # flush threshold for the per-call transition event list

    def __init__(self, n_bins, output_group, calc_fpts=True):
        """
        Parameters
        ----------
        n_bins : int
            Number of bins in the analysis region space.
        output_group : h5py.Group (or compatible)
            HDF5 group in which the 'transitions' dataset is created.
        calc_fpts : bool
            If true, first-passage times are computed and stored with each event.
        """
        self.calc_fpts = calc_fpts
        self.n_bins = n_bins
        self.iibins = np.arange(n_bins)  # bin indices, for fancy indexing
        self.iibdisc = np.empty((n_bins,), np.bool_)  # scratch mask reused per crossing

        # Smallest integer dtype able to hold a bin index
        self.bin_index_dtype = np.min_scalar_type(n_bins)

        # One record per completed transition event
        self.tdat_dtype = np.dtype(
            [
                ('traj', self.index_dtype),
                ('n_iter', self.index_dtype),
                ('timepoint', self.index_dtype),
                ('initial_bin', self.bin_index_dtype),
                ('final_bin', self.bin_index_dtype),
                ('initial_weight', self.weight_dtype),
                ('final_weight', self.weight_dtype),
                ('initial_bin_pop', self.weight_dtype),
                ('duration', self.index_dtype),
                ('fpt', self.index_dtype),
            ]
        )

        # HDF5 group in which to store results
        self.output_group = output_group
        self.tdat_buffer = np.empty((self.tdat_buffersize,), dtype=self.tdat_dtype)
        self.tdat_buffer_offset = 0
        self.output_tdat_offset = 0
        self.output_tdat_ds = None

        # Accumulators/counters
        self.n_trans = None  # shape (n_bins,n_bins)

        # Time points and per-timepoint data
        self.last_exit = None  # (n_bins,)
        self.last_entry = None  # (n_bins,)
        self.last_completion = None  # (n_bins,n_bins)
        self.weight_last_exit = None  # (n_bins)
        self.bin_pops_last_exit = None  # (n_bins,)

        # Analysis continuation information
        self.timepoint = None  # current time index for separate calls on same trajectory
        self.last_bin = None  # last region occupied, for separate calls on same trajectory
        self.last_bin_pop = None  # total weight in self.last_region at end of last processing step

        self.clear()

    def clear(self):
        """Reset all per-trajectory state, counters, and output buffering."""
        self.clear_state()
        self.n_trans = np.zeros((self.n_bins, self.n_bins), self.count_dtype)
        self.tdat_buffer = np.empty((self.tdat_buffersize,), dtype=self.tdat_dtype)
        self.tdat_buffer_offset = 0
        self.output_tdat_offset = 0
        self.output_tdat_ds = None

    def clear_state(self):
        """Reset only the per-trajectory tracking state (not counts or output)."""
        self.last_exit = np.zeros((self.n_bins,), self.index_dtype)
        self.last_entry = np.zeros((self.n_bins,), self.index_dtype)
        self.last_completion = np.zeros((self.n_bins, self.n_bins), self.index_dtype)
        self.weight_last_exit = np.zeros((self.n_bins,), self.weight_dtype)
        self.bin_pops_last_exit = np.zeros((self.n_bins,), self.weight_dtype)
        self.timepoint = 0
        self.last_bin = None
        self.last_bin_pop = None

    def get_state(self):
        """Snapshot the tracking state (deep copies of arrays) for later restore."""
        return {
            'last_entry': self.last_entry.copy(),
            'last_exit': self.last_exit.copy(),
            'last_completion': self.last_completion.copy(),
            'weight_last_exit': self.weight_last_exit.copy(),
            'bin_pops_last_exit': self.bin_pops_last_exit.copy(),
            'timepoint': self.timepoint,
            'last_bin': self.last_bin,
            'last_bin_pop': self.last_bin_pop,
        }

    def set_state(self, state_dict):
        """Restore tracking state from a dict produced by get_state()."""
        self.last_entry = state_dict['last_entry']
        self.last_exit = state_dict['last_exit']
        self.last_completion = state_dict['last_completion']
        self.weight_last_exit = state_dict['weight_last_exit']
        self.bin_pops_last_exit = state_dict['bin_pops_last_exit']
        self.timepoint = state_dict['timepoint']
        self.last_bin = state_dict['last_bin']
        self.last_bin_pop = state_dict['last_bin_pop']

    def record_transition_data(self, tdat):
        """Update running statistics and write transition data to HDF5 (with buffering)"""

        # Write out accumulated transition data
        if self.output_tdat_ds is None:
            # Create dataset, replacing any stale one left in the group
            try:
                del self.output_group['transitions']
            except KeyError:
                pass

            self.output_tdat_ds = self.output_group.create_dataset(
                'transitions',
                shape=(1,),
                dtype=self.tdat_dtype,
                maxshape=(None,),
                chunks=(self.output_tdat_chunksize,),
                compression='gzip',
            )

        # If the amount of data to write exceeds our remaining buffer space, flush the buffer, then
        # write data directly to HDF5, otherwise just add to the buffer and wait for the last flush
        if len(tdat) + self.tdat_buffer_offset > self.tdat_buffersize:
            self.flush_transition_data()
            ub = self.output_tdat_offset + len(tdat)
            self.output_tdat_ds.resize((ub,))
            self.output_tdat_ds[self.output_tdat_offset : ub] = tdat
            self.output_tdat_offset += len(tdat)
        else:
            self.tdat_buffer[self.tdat_buffer_offset : (self.tdat_buffer_offset + len(tdat))] = tdat
            self.tdat_buffer_offset += len(tdat)

    def flush_transition_data(self):
        """Flush any unwritten output that may be present"""
        if self.output_tdat_ds is None:
            return

        # self.tdat_buffer_offset is the number of items in the buffer
        nbuf = self.tdat_buffer_offset
        if nbuf == 0:
            return
        ub = nbuf + self.output_tdat_offset
        if ub > self.output_tdat_ds.len():
            # Resize dataset to fit data
            self.output_tdat_ds.resize((ub,))

        self.output_tdat_ds[self.output_tdat_offset : ub] = self.tdat_buffer[:nbuf]
        self.output_tdat_offset += nbuf
        self.tdat_buffer_offset = 0

    def start_accumulation(self, assignments, weights, bin_pops, traj=0, n_iter=0):
        """Begin accumulation for a new trajectory: reset tracking state and
        process the given per-timepoint assignments/weights/bin populations."""
        self.clear_state()
        timepoints = np.arange(len(assignments))
        self._accumulate_transitions(timepoints, assignments, weights, bin_pops, traj, n_iter)

    def continue_accumulation(self, assignments, weights, bin_pops, traj=0, n_iter=0):
        """Continue accumulation for the trajectory from the previous call.

        The last timepoint of the previous call is prepended (with zero weight,
        and the remembered bin population) so the boundary crossing between
        calls, if any, is detected.
        """
        aug_assign = np.empty((len(assignments) + 1,), assignments.dtype)
        aug_assign[0] = self.last_bin
        aug_assign[1:] = assignments

        aug_weights = np.empty((len(weights) + 1,), self.weight_dtype)
        aug_weights[0] = 0
        aug_weights[1:] = weights

        aug_pops = np.empty((len(bin_pops) + 1, len(bin_pops[0])), self.weight_dtype)
        aug_pops[0, :] = 0
        aug_pops[0, self.last_bin] = self.last_bin_pop
        aug_pops[1:] = bin_pops

        timepoints = np.arange(self.timepoint, self.timepoint + len(aug_assign))
        self._accumulate_transitions(timepoints, aug_assign, aug_weights, aug_pops, traj, n_iter)

    def _accumulate_transitions(self, timepoints, assignments, weights, bin_pops, traj, n_iter):
        """Core event detection: locate bin crossings in ``assignments`` and,
        for each crossing, record every transition it completes."""
        tdat = []

        # "_from_1" = assignments from index 1 onward (arrival bins);
        # "_to_1" = assignments up to the last (departure bins)
        assignments_from_1 = assignments[1:]
        assignments_to_1 = assignments[:-1]

        calc_fpts = self.calc_fpts

        # A crossing occurs wherever consecutive assignments differ
        trans_occur = assignments_from_1 != assignments_to_1
        trans_ibin = assignments_to_1[trans_occur]
        trans_fbin = assignments_from_1[trans_occur]
        trans_timepoints = timepoints[1:][trans_occur]
        trans_weights = weights[1:][trans_occur]  # arrival weights
        trans_ibinpops = bin_pops[:-1][trans_occur]

        # Local aliases hoisted for speed in the loop below
        last_exit = self.last_exit
        last_entry = self.last_entry
        last_completion = self.last_completion
        bin_pops_last_exit = self.bin_pops_last_exit
        weight_last_exit = self.weight_last_exit
        n_trans = self.n_trans
        iibdisc = self.iibdisc
        iibins = self.iibins
        tdat_maxlen = self.max_acc
        for trans_ti, weight, ibin, fbin, ibinpops in zip(trans_timepoints, trans_weights, trans_ibin, trans_fbin, trans_ibinpops):
            # Record this crossing event's data
            bin_pops_last_exit[ibin] = ibinpops[ibin]
            last_exit[ibin] = trans_ti
            last_entry[fbin] = trans_ti
            weight_last_exit[ibin] = weight

            # See what other transitions this crossing event completes: any bin
            # previously exited (after being entered) with no completion to fbin since
            iibdisc[:] = last_exit > 0
            iibdisc &= last_entry > last_completion[:, fbin]

            # Calculate event durations, etc for each transition generated by this crossing event
            durations = -last_exit + trans_ti + 1  # = time now - time of exit from initial bin
            if calc_fpts:
                fpts = -last_completion[fbin, :] + trans_ti  # = time now - time of last final->initial transition
                # NOTE(review): the mask below indexes last_completion[:, fbin]
                # while fpts is built from last_completion[fbin, :]; this
                # asymmetry looks suspicious -- confirm against upstream before
                # relying on fpt values. Behavior intentionally left unchanged.
                fpts[last_completion[:, fbin] == 0] = 0

                for iibin in iibins[iibdisc]:
                    tdat.append(
                        (
                            traj,
                            n_iter,
                            trans_ti,
                            iibin,
                            fbin,
                            weight_last_exit[iibin],
                            weight,
                            bin_pops_last_exit[iibin],
                            durations[iibin],
                            fpts[iibin],
                        )
                    )
            else:
                for iibin in iibins[iibdisc]:
                    tdat.append(
                        (
                            traj,
                            n_iter,
                            trans_ti,
                            iibin,
                            fbin,
                            weight_last_exit[iibin],
                            weight,
                            bin_pops_last_exit[iibin],
                            durations[iibin],
                            0,
                        )
                    )

            # Update tracking and statistics
            last_completion[iibdisc, fbin] = trans_ti
            n_trans[iibdisc, fbin] += 1

            if len(tdat) > tdat_maxlen:
                self.record_transition_data(tdat)
                # BUG FIX: clear the list after it has been recorded. Previously
                # the same (ever-growing) list was re-recorded on every further
                # iteration and again after the loop, writing duplicate rows.
                del tdat[:]

        self.record_transition_data(tdat)
        self.timepoint = timepoints[-1]
        self.last_bin = assignments[-1]
        self.last_bin_pop = bin_pops[-1, assignments[-1]]
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
class TransitionAnalysisMixin(AnalysisMixin):
    """Analysis-tool mixin that finds bin-to-bin transition events by walking
    the weighted-ensemble trajectory tree and stores the results in the
    analysis HDF5 file under a 'transitions' group.
    """

    def __init__(self):
        super().__init__()
        self.discard_transition_data = False  # set from --discard-transition-data
        self.calc_fpts = False  # whether the accumulator computes first-passage times
        self.trans_h5gname = 'transitions'  # name of the output HDF5 group
        self.trans_h5group = None  # cached handle to that group
        self.__transitions_ds = None  # cached handle to the 'transitions' dataset
        self.n_trajs = 0  # number of independent trajectories encountered

    def require_transitions_group(self):
        """Return the transitions HDF5 group, creating and caching it on first use."""
        group = self.trans_h5group
        if group is None:
            group = self.anal_h5file.require_group(self.trans_h5gname)
            self.trans_h5group = group
        return group

    def delete_transitions_group(self):
        """Forget the cached group handle and delete the group from the file."""
        self.trans_h5group = None
        del self.anal_h5file[self.trans_h5gname]

    def get_transitions_ds(self):
        """Return the 'transitions' dataset, caching the handle after first lookup."""
        ds = self.__transitions_ds
        if ds is None:
            ds = self.trans_h5group['transitions']
            self.__transitions_ds = ds
        return ds

    def add_args(self, parser, upcall=True):
        """Attach transition-analysis command line options; chain to the next
        mixin's add_args() first when upcall is true."""
        if upcall:
            upfunc = getattr(super(), 'add_args', None)
            if upfunc is not None:
                upfunc(parser)

        group = parser.add_argument_group('transition analysis options')
        group.add_argument(
            '--discard-transition-data',
            dest='discard_transition_data',
            action='store_true',
            help='''Discard any existing transition data stored in the analysis HDF5 file.''',
        )

    def process_args(self, args, upcall=True):
        """Digest parsed command line options; chain to the next mixin's
        process_args() afterwards when upcall is true."""
        self.discard_transition_data = args.discard_transition_data

        if upcall:
            upfunc = getattr(super(), 'process_args', None)
            if upfunc is not None:
                upfunc(args)

    def require_transitions(self):
        """Ensure up-to-date transition data exists, (re)computing it when it
        is missing, stale, or explicitly discarded."""
        self.require_bin_assignments()
        self.require_transitions_group()

        reason = None
        if self.discard_transition_data:
            reason = 'Discarding existing transition data.'
        elif not self.check_data_binhash(self.trans_h5group):
            reason = 'Bin definitions have changed; deleting existing transition data.'
        elif 'transitions' in self.trans_h5group and not self.check_data_iter_range_least(self.trans_h5group):
            reason = 'Existing transition data is for different first/last iterations; deleting.'

        if reason is not None:
            westpa.rc.pstatus(reason)
            self.delete_transitions_group()
            self.find_transitions()

    def find_transitions(self):
        """Walk the trajectory tree over [first_iter, last_iter], feeding each
        segment through a TransitionEventAccumulator, then store the event
        dataset, the n_trans count matrix, and provenance attributes."""
        westpa.rc.pstatus('Finding transitions...')
        output_group = self.require_transitions_group()

        self.n_segs_visited = 0
        self.n_total_segs = self.total_segs_in_range(self.first_iter, self.last_iter)
        self.accumulator = TransitionEventAccumulator(self.n_bins, output_group, calc_fpts=self.calc_fpts)
        self.bin_assignments = self.get_bin_assignments(self.first_iter, self.last_iter)
        self.bin_populations = self.get_bin_populations(self.first_iter, self.last_iter)

        walker = TrajWalker(data_reader=self)

        self.__pcoord_len = self.get_pcoord_len(self.first_iter)
        self.__quiet_mode = westpa.rc.quiet_mode

        # The accumulator's get_state/set_state let the walker checkpoint and
        # restore accumulation state at each branch point of the tree
        walker.trace_trajectories(
            self.first_iter,
            self.last_iter,
            callable=self._segment_callback,
            include_pcoords=False,
            get_state=self.accumulator.get_state,
            set_state=self.accumulator.set_state,
        )
        self.accumulator.flush_transition_data()

        # Replace any stale count matrix
        try:
            del output_group['n_trans']
        except KeyError:
            pass
        output_group['n_trans'] = self.accumulator.n_trans

        for obj in (output_group, output_group['n_trans'], output_group['transitions']):
            self.record_data_iter_range(obj)
            self.record_data_binhash(obj)
            obj.attrs['n_trajs'] = self.n_trajs
            obj.attrs['n_segs'] = self.n_segs_visited

        self.accumulator.clear()
        westpa.rc.pstatus()

    def _segment_callback(self, segment, children, history):
        """Per-segment visitor invoked by TrajWalker: feed this segment's bin
        assignments into the accumulator and report progress."""
        rel_iter = segment.n_iter - self.first_iter
        seg_id = segment.seg_id
        weights = np.empty((self.__pcoord_len,), np.float64)
        weights[:] = segment.weight
        bin_pops = self.bin_populations[rel_iter, :, :]

        if len(history) == 0:
            # Root of a new tree: begin a fresh trajectory
            self.n_trajs += 1
            accumulate = self.accumulator.start_accumulation
        else:
            accumulate = self.accumulator.continue_accumulation
        accumulate(self.bin_assignments[rel_iter, seg_id, :], weights, bin_pops, traj=self.n_trajs, n_iter=segment.n_iter)

        self.n_segs_visited += 1

        if not self.__quiet_mode and (self.n_segs_visited % 1000 == 0 or self.n_segs_visited == self.n_total_segs):
            pct_visited = self.n_segs_visited / self.n_total_segs * 100
            westpa.rc.pstatus(
                '\r {:d} of {:d} segments ({:.1f}%) analyzed ({:d} independent trajectories)'.format(
                    int(self.n_segs_visited), int(self.n_total_segs), float(pct_visited), self.n_trajs
                ),
                end='',
            )
            westpa.rc.pflush()
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
class BFTransitionAnalysisMixin(TransitionAnalysisMixin):
    """Transition analysis for brute-force (plain, non-weighted-ensemble)
    trajectory data: instead of walking a trajectory tree, bin assignments are
    streamed per trajectory in fixed-size chunks with unit weights/populations.
    """

    def require_transitions(self):
        # Same gate as the parent class, but without the iteration-range check
        # (brute-force data is keyed by trajectory, not by WE iteration)
        self.require_bin_assignments()
        self.require_transitions_group()
        do_trans = False
        if self.discard_transition_data:
            westpa.rc.pstatus('Discarding existing transition data.')
            do_trans = True
        elif not self.check_data_binhash(self.trans_h5group):
            westpa.rc.pstatus('Bin definitions have changed; deleting existing transition data.')
            do_trans = True

        if do_trans:
            self.delete_transitions_group()
            self.find_transitions()

    def find_transitions(self, chunksize=65536):
        """Stream each trajectory's bin assignments through a
        TransitionEventAccumulator in chunks of ``chunksize`` timepoints, then
        store the n_trans count matrix and provenance hashes.
        """
        self.require_bf_h5file()
        self.require_binning_group()
        westpa.rc.pstatus('Finding transitions...')
        output_group = self.require_analysis_group('transitions')

        self.accumulator = TransitionEventAccumulator(self.n_bins, output_group, calc_fpts=True)
        assignments_ds = self.binning_h5group['bin_assignments']

        # Width of the largest row count, for aligned progress output
        max_nrows = assignments_ds.len()
        maxwidth_nrows = len(str(max_nrows))

        for traj_id in range(self.get_n_trajs()):
            nrows = self.get_traj_len(traj_id)
            for istart in range(0, nrows, chunksize):
                iend = min(istart + chunksize, nrows)
                assignments = assignments_ds[traj_id, istart:iend]
                # Unweighted data: unit weights and unit bin populations
                weights = np.ones((len(assignments),))
                binpops = np.ones((len(assignments), self.n_bins))

                # First chunk starts a trajectory; later chunks continue it
                if istart == 0:
                    self.accumulator.start_accumulation(assignments, weights, binpops, traj=traj_id)
                else:
                    self.accumulator.continue_accumulation(assignments, weights, binpops, traj=traj_id)

                westpa.rc.pstatus(
                    '\r Trajectory {:d}: {:{mwnr}d}/{:<{mwnr}d} ({:.2f}%)'.format(
                        int(traj_id), int(iend), int(nrows), iend / nrows * 100, mwnr=maxwidth_nrows
                    ),
                    end='',
                )
                westpa.rc.pflush()
                self.accumulator.flush_transition_data()
                del assignments, weights, binpops
            westpa.rc.pstatus()

        # Replace any stale count matrix
        try:
            del output_group['n_trans']
        except KeyError:
            pass
        output_group['n_trans'] = self.accumulator.n_trans

        for h5object in (output_group, output_group['n_trans'], output_group['transitions']):
            self.record_data_binhash(h5object)

        self.accumulator.clear()
|
|
File without changes
|