westpa-2022.12-cp312-cp312-macosx_10_13_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (149)
  1. westpa/__init__.py +14 -0
  2. westpa/_version.py +21 -0
  3. westpa/analysis/__init__.py +5 -0
  4. westpa/analysis/core.py +746 -0
  5. westpa/analysis/statistics.py +27 -0
  6. westpa/analysis/trajectories.py +360 -0
  7. westpa/cli/__init__.py +0 -0
  8. westpa/cli/core/__init__.py +0 -0
  9. westpa/cli/core/w_fork.py +152 -0
  10. westpa/cli/core/w_init.py +230 -0
  11. westpa/cli/core/w_run.py +77 -0
  12. westpa/cli/core/w_states.py +212 -0
  13. westpa/cli/core/w_succ.py +99 -0
  14. westpa/cli/core/w_truncate.py +68 -0
  15. westpa/cli/tools/__init__.py +0 -0
  16. westpa/cli/tools/ploterr.py +506 -0
  17. westpa/cli/tools/plothist.py +706 -0
  18. westpa/cli/tools/w_assign.py +596 -0
  19. westpa/cli/tools/w_bins.py +166 -0
  20. westpa/cli/tools/w_crawl.py +119 -0
  21. westpa/cli/tools/w_direct.py +547 -0
  22. westpa/cli/tools/w_dumpsegs.py +94 -0
  23. westpa/cli/tools/w_eddist.py +506 -0
  24. westpa/cli/tools/w_fluxanl.py +376 -0
  25. westpa/cli/tools/w_ipa.py +833 -0
  26. westpa/cli/tools/w_kinavg.py +127 -0
  27. westpa/cli/tools/w_kinetics.py +96 -0
  28. westpa/cli/tools/w_multi_west.py +414 -0
  29. westpa/cli/tools/w_ntop.py +213 -0
  30. westpa/cli/tools/w_pdist.py +515 -0
  31. westpa/cli/tools/w_postanalysis_matrix.py +82 -0
  32. westpa/cli/tools/w_postanalysis_reweight.py +53 -0
  33. westpa/cli/tools/w_red.py +491 -0
  34. westpa/cli/tools/w_reweight.py +780 -0
  35. westpa/cli/tools/w_select.py +226 -0
  36. westpa/cli/tools/w_stateprobs.py +111 -0
  37. westpa/cli/tools/w_trace.py +599 -0
  38. westpa/core/__init__.py +0 -0
  39. westpa/core/_rc.py +673 -0
  40. westpa/core/binning/__init__.py +55 -0
  41. westpa/core/binning/_assign.cpython-312-darwin.so +0 -0
  42. westpa/core/binning/assign.py +455 -0
  43. westpa/core/binning/binless.py +96 -0
  44. westpa/core/binning/binless_driver.py +54 -0
  45. westpa/core/binning/binless_manager.py +190 -0
  46. westpa/core/binning/bins.py +47 -0
  47. westpa/core/binning/mab.py +506 -0
  48. westpa/core/binning/mab_driver.py +54 -0
  49. westpa/core/binning/mab_manager.py +198 -0
  50. westpa/core/data_manager.py +1694 -0
  51. westpa/core/extloader.py +74 -0
  52. westpa/core/h5io.py +995 -0
  53. westpa/core/kinetics/__init__.py +24 -0
  54. westpa/core/kinetics/_kinetics.cpython-312-darwin.so +0 -0
  55. westpa/core/kinetics/events.py +147 -0
  56. westpa/core/kinetics/matrates.py +156 -0
  57. westpa/core/kinetics/rate_averaging.py +266 -0
  58. westpa/core/progress.py +218 -0
  59. westpa/core/propagators/__init__.py +54 -0
  60. westpa/core/propagators/executable.py +719 -0
  61. westpa/core/reweight/__init__.py +14 -0
  62. westpa/core/reweight/_reweight.cpython-312-darwin.so +0 -0
  63. westpa/core/reweight/matrix.py +126 -0
  64. westpa/core/segment.py +119 -0
  65. westpa/core/sim_manager.py +835 -0
  66. westpa/core/states.py +359 -0
  67. westpa/core/systems.py +93 -0
  68. westpa/core/textio.py +74 -0
  69. westpa/core/trajectory.py +330 -0
  70. westpa/core/we_driver.py +910 -0
  71. westpa/core/wm_ops.py +43 -0
  72. westpa/core/yamlcfg.py +391 -0
  73. westpa/fasthist/__init__.py +34 -0
  74. westpa/fasthist/_fasthist.cpython-312-darwin.so +0 -0
  75. westpa/mclib/__init__.py +271 -0
  76. westpa/mclib/__main__.py +28 -0
  77. westpa/mclib/_mclib.cpython-312-darwin.so +0 -0
  78. westpa/oldtools/__init__.py +4 -0
  79. westpa/oldtools/aframe/__init__.py +35 -0
  80. westpa/oldtools/aframe/atool.py +75 -0
  81. westpa/oldtools/aframe/base_mixin.py +26 -0
  82. westpa/oldtools/aframe/binning.py +178 -0
  83. westpa/oldtools/aframe/data_reader.py +560 -0
  84. westpa/oldtools/aframe/iter_range.py +200 -0
  85. westpa/oldtools/aframe/kinetics.py +117 -0
  86. westpa/oldtools/aframe/mcbs.py +153 -0
  87. westpa/oldtools/aframe/output.py +39 -0
  88. westpa/oldtools/aframe/plotting.py +90 -0
  89. westpa/oldtools/aframe/trajwalker.py +126 -0
  90. westpa/oldtools/aframe/transitions.py +469 -0
  91. westpa/oldtools/cmds/__init__.py +0 -0
  92. westpa/oldtools/cmds/w_ttimes.py +361 -0
  93. westpa/oldtools/files.py +34 -0
  94. westpa/oldtools/miscfn.py +23 -0
  95. westpa/oldtools/stats/__init__.py +4 -0
  96. westpa/oldtools/stats/accumulator.py +35 -0
  97. westpa/oldtools/stats/edfs.py +129 -0
  98. westpa/oldtools/stats/mcbs.py +96 -0
  99. westpa/tools/__init__.py +33 -0
  100. westpa/tools/binning.py +472 -0
  101. westpa/tools/core.py +340 -0
  102. westpa/tools/data_reader.py +159 -0
  103. westpa/tools/dtypes.py +31 -0
  104. westpa/tools/iter_range.py +198 -0
  105. westpa/tools/kinetics_tool.py +340 -0
  106. westpa/tools/plot.py +283 -0
  107. westpa/tools/progress.py +17 -0
  108. westpa/tools/selected_segs.py +154 -0
  109. westpa/tools/wipi.py +751 -0
  110. westpa/trajtree/__init__.py +4 -0
  111. westpa/trajtree/_trajtree.cpython-312-darwin.so +0 -0
  112. westpa/trajtree/trajtree.py +117 -0
  113. westpa/westext/__init__.py +0 -0
  114. westpa/westext/adaptvoronoi/__init__.py +3 -0
  115. westpa/westext/adaptvoronoi/adaptVor_driver.py +214 -0
  116. westpa/westext/hamsm_restarting/__init__.py +3 -0
  117. westpa/westext/hamsm_restarting/example_overrides.py +35 -0
  118. westpa/westext/hamsm_restarting/restart_driver.py +1165 -0
  119. westpa/westext/stringmethod/__init__.py +11 -0
  120. westpa/westext/stringmethod/fourier_fitting.py +69 -0
  121. westpa/westext/stringmethod/string_driver.py +253 -0
  122. westpa/westext/stringmethod/string_method.py +306 -0
  123. westpa/westext/weed/BinCluster.py +180 -0
  124. westpa/westext/weed/ProbAdjustEquil.py +100 -0
  125. westpa/westext/weed/UncertMath.py +247 -0
  126. westpa/westext/weed/__init__.py +10 -0
  127. westpa/westext/weed/weed_driver.py +192 -0
  128. westpa/westext/wess/ProbAdjust.py +101 -0
  129. westpa/westext/wess/__init__.py +6 -0
  130. westpa/westext/wess/wess_driver.py +217 -0
  131. westpa/work_managers/__init__.py +57 -0
  132. westpa/work_managers/core.py +396 -0
  133. westpa/work_managers/environment.py +134 -0
  134. westpa/work_managers/mpi.py +318 -0
  135. westpa/work_managers/processes.py +187 -0
  136. westpa/work_managers/serial.py +28 -0
  137. westpa/work_managers/threads.py +79 -0
  138. westpa/work_managers/zeromq/__init__.py +20 -0
  139. westpa/work_managers/zeromq/core.py +641 -0
  140. westpa/work_managers/zeromq/node.py +131 -0
  141. westpa/work_managers/zeromq/work_manager.py +526 -0
  142. westpa/work_managers/zeromq/worker.py +320 -0
  143. westpa-2022.12.dist-info/AUTHORS +22 -0
  144. westpa-2022.12.dist-info/LICENSE +21 -0
  145. westpa-2022.12.dist-info/METADATA +193 -0
  146. westpa-2022.12.dist-info/RECORD +149 -0
  147. westpa-2022.12.dist-info/WHEEL +6 -0
  148. westpa-2022.12.dist-info/entry_points.txt +29 -0
  149. westpa-2022.12.dist-info/top_level.txt +1 -0
westpa/tools/iter_range.py
@@ -0,0 +1,198 @@
+ import logging
+
+ import numpy as np
+
+ import westpa
+ from westpa.tools.core import WESTToolComponent
+ from westpa.core import h5io
+
+ log = logging.getLogger(__name__)
+
+
+ class IterRangeSelection(WESTToolComponent):
+     '''Select and record limits on iterations used in analysis and/or reporting.
+     This class provides both the user-facing command-line options and parsing, and
+     the application-side API for recording limits in HDF5.
+
+     HDF5 datasets calculated based on a restricted set of iterations should be tagged
+     with the following attributes:
+
+     ``first_iter``
+         The first iteration included in the calculation.
+
+     ``last_iter``
+         One past the last iteration included in the calculation.
+
+     ``iter_step``
+         Blocking or sampling period for iterations included in the calculation.
+     '''
+
+     def __init__(self, data_manager=None):
+         super().__init__()
+
+         self.data_manager = data_manager
+
+         # First iteration on which to perform analysis/reporting
+         self.iter_start = None
+
+         # One past the last iteration on which to perform analysis/reporting
+         self.iter_stop = None
+
+         # Step
+         self.iter_step = None
+
+         self.iter_count = None
+
+         self.include_args.update({'iter_start': True, 'iter_stop': True, 'iter_step': False})
+
+     def add_args(self, parser):
+         group = parser.add_argument_group('iteration range')
+
+         if self.include_args['iter_start']:
+             group.add_argument(
+                 '--first-iter',
+                 dest='first_iter',
+                 type=int,
+                 metavar='N_ITER',
+                 default=1,
+                 help='''Begin analysis at iteration N_ITER (default: %(default)d).''',
+             )
+         if self.include_args['iter_stop']:
+             group.add_argument(
+                 '--last-iter',
+                 dest='last_iter',
+                 type=int,
+                 metavar='N_ITER',
+                 help='''Conclude analysis with N_ITER, inclusive (default: last completed iteration).''',
+             )
+         if self.include_args['iter_step']:
+             group.add_argument(
+                 '--step-iter', dest='step_iter', type=int, metavar='STEP', help='''Analyze/report in blocks of STEP iterations.'''
+             )
+
+     def process_args(self, args, override_iter_start=None, override_iter_stop=None, default_iter_step=1):
+         if override_iter_start is not None:
+             self.iter_start = override_iter_start
+         elif args.first_iter is not None:
+             self.iter_start = args.first_iter
+         else:
+             self.iter_start = 1
+
+         if override_iter_stop is not None:
+             self.iter_stop = override_iter_stop
+         elif args.last_iter is not None:
+             self.iter_stop = args.last_iter + 1
+         else:
+             self.iter_stop = (self.data_manager or westpa.rc.get_data_manager()).current_iteration
+
+         if self.include_args['iter_step']:
+             self.iter_step = args.step_iter or default_iter_step
+
+         try:
+             self.iter_count = self.iter_stop - self.iter_start
+         except TypeError:
+             # one or both are None
+             pass
+
+     def iter_block_iter(self):
+         '''Return an iterable of (block_start, block_end) over the blocks of iterations
+         selected by --first-iter/--last-iter/--step-iter.'''
+
+         for blkfirst in range(self.iter_start, self.iter_stop, self.iter_step):
+             yield (blkfirst, min(self.iter_stop, blkfirst + self.iter_step))
+
+     def n_iter_blocks(self):
+         '''Return the number of blocks of iterations (as returned by ``iter_block_iter``)
+         selected by --first-iter/--last-iter/--step-iter.'''
+         npoints = self.iter_stop - self.iter_start
+         if npoints % self.iter_step == 0:
+             return npoints // self.iter_step
+         else:
+             return npoints // self.iter_step + 1
+
+     def record_data_iter_range(self, h5object, iter_start=None, iter_stop=None):
+         '''Store attributes ``iter_start`` and ``iter_stop`` on the given HDF5 object (group/dataset).'''
+         iter_start = self.iter_start if iter_start is None else iter_start
+         iter_stop = self.iter_stop if iter_stop is None else iter_stop
+         h5object.attrs['iter_start'] = iter_start
+         h5object.attrs['iter_stop'] = iter_stop
+
+     def record_data_iter_step(self, h5object, iter_step=None):
+         '''Store attribute ``iter_step`` on the given HDF5 object (group/dataset).'''
+         iter_step = self.iter_step if iter_step is None else iter_step
+         h5object.attrs['iter_step'] = iter_step
+
+     def check_data_iter_range_least(self, h5object, iter_start=None, iter_stop=None):
+         '''Check that the given HDF5 object contains (as denoted by its ``iter_start``/``iter_stop`` attributes)
+         data at least for the iteration range specified.'''
+         iter_start = self.iter_start if iter_start is None else iter_start
+         iter_stop = self.iter_stop if iter_stop is None else iter_stop
+
+         return h5io.check_iter_range_least(h5object, iter_start, iter_stop)
+
+     def check_data_iter_range_equal(self, h5object, iter_start=None, iter_stop=None):
+         '''Check that the given HDF5 object contains (as denoted by its ``iter_start``/``iter_stop`` attributes)
+         data exactly for the iteration range specified.'''
+
+         iter_start = self.iter_start if iter_start is None else iter_start
+         iter_stop = self.iter_stop if iter_stop is None else iter_stop
+
+         return h5io.check_iter_range_equal(h5object, iter_start, iter_stop)
+
+     def check_data_iter_step_conformant(self, h5object, iter_step=None):
+         '''Check that the given HDF5 object contains per-iteration data at an iteration stride suitable for extracting data
+         with the given stride (in other words, the given ``iter_step`` is a multiple of the stride with
+         which data was recorded).'''
+
+         iter_step = iter_step or self.iter_step
+         obj_iter_step = h5object.attrs.get('iter_step')
+         return obj_iter_step % iter_step == 0
+
+     def check_data_iter_step_equal(self, h5object, iter_step=None):
+         '''Check that the given HDF5 object contains per-iteration data at an iteration stride the same as
+         that specified.'''
+         iter_step = iter_step or self.iter_step
+         obj_iter_step = h5object.attrs.get('iter_step')
+         return obj_iter_step == iter_step
+
+     def slice_per_iter_data(self, dataset, iter_start=None, iter_stop=None, iter_step=None, axis=0):
+         '''Return the subset of the given dataset corresponding to the given iteration range and stride. Unless
+         otherwise specified, the first dimension of the dataset is the one sliced.'''
+
+         iter_start = self.iter_start if iter_start is None else iter_start
+         iter_stop = self.iter_stop if iter_stop is None else iter_stop
+         iter_step = self.iter_step if iter_step is None else iter_step
+
+         ds_iter_start = dataset.attrs['iter_start']
+         ds_iter_stop = dataset.attrs['iter_stop']
+         ds_iter_step = dataset.attrs.get('iter_step', 1)
+
+         if iter_start < ds_iter_start or iter_stop > ds_iter_stop or ds_iter_step % iter_step > 0:
+             raise IndexError(
+                 'Cannot slice requested iterations [{:d},{:d}) (stride={:d}) from dataset {!r} with range [{:d},{:d}) (stride={:d}).'.format(
+                     iter_start, iter_stop, iter_step, dataset, ds_iter_start, ds_iter_stop, ds_iter_step
+                 )
+             )
+
+         dimslices = []
+         for idim in range(len(dataset.shape)):
+             if idim == axis:
+                 dimslices.append(slice(iter_start - ds_iter_start, iter_stop - ds_iter_stop + iter_step, iter_step))
+             else:
+                 dimslices.append(slice(None, None, None))
+
+         dimslices = tuple(dimslices)
+         log.debug('slicing {!r} with {!r}'.format(dataset, dimslices))
+         data = dataset[dimslices]
+         log.debug('resulting data is of shape {!r}'.format(data.shape))
+         return data
+
+     def iter_range(self, iter_start=None, iter_stop=None, iter_step=None, dtype=None):
+         '''Return a sequence for the given iteration numbers and stride, filling
+         in missing values from those stored on ``self``. The smallest data type capable of
+         holding ``iter_stop`` is returned unless otherwise specified using the ``dtype``
+         argument.'''
+         iter_start = self.iter_start if iter_start is None else iter_start
+         iter_stop = self.iter_stop if iter_stop is None else iter_stop
+         iter_step = self.iter_step if iter_step is None else iter_step
+         return np.arange(iter_start, iter_stop, iter_step, dtype=(dtype or np.min_scalar_type(iter_stop)))
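
The iteration-range attributes that record_data_iter_range/record_data_iter_step stamp onto HDF5 objects (``iter_start``, ``iter_stop``, ``iter_step``) are plain HDF5 metadata, so the convention can be exercised outside the tool stack. A minimal sketch with h5py and numpy follows (not part of this wheel; the file name example_analysis.h5 and dataset name rate_evolution are made up for illustration), mirroring what the record_* methods write and what check_data_iter_range_least verifies:

import h5py
import numpy as np

# Illustrative iteration range: iterations [1, 101), sampled every 10.
iter_start, iter_stop, iter_step = 1, 101, 10

with h5py.File('example_analysis.h5', 'w') as f:
    # Placeholder per-iteration data; the contents are irrelevant to the convention.
    ds = f.create_dataset('rate_evolution', data=np.zeros((iter_stop - iter_start, 4)))
    # Equivalent of record_data_iter_range()/record_data_iter_step():
    ds.attrs['iter_start'] = iter_start
    ds.attrs['iter_stop'] = iter_stop  # one past the last iteration included
    ds.attrs['iter_step'] = iter_step

with h5py.File('example_analysis.h5', 'r') as f:
    ds = f['rate_evolution']
    # The "at least" check used before slicing: the stored range must cover
    # the range requested for a downstream calculation.
    want_start, want_stop = 10, 51
    covered = ds.attrs['iter_start'] <= want_start and ds.attrs['iter_stop'] >= want_stop
    print('requested iterations covered:', bool(covered))  # True for these numbers
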
westpa/tools/kinetics_tool.py
@@ -0,0 +1,340 @@
+ import numpy as np
+
+ from westpa.tools import WESTDataReader, IterRangeSelection, WESTSubcommand, ProgressIndicatorComponent
+
+ from westpa import mclib
+ from westpa.core import h5io
+
+ from westpa.tools.dtypes import iter_block_ci_dtype as ci_dtype
+
+
+ # A function to just help with creating future objects for the work manager.
+
+
+ def generate_future(work_manager, name, eval_block, kwargs):
+     submit_kwargs = {'name': name}
+     submit_kwargs.update(kwargs)
+     future = work_manager.submit(eval_block, kwargs=submit_kwargs)
+     return future
+
+
+ class WESTKineticsBase(WESTSubcommand):
+     '''
+     Common argument processing for w_direct/w_reweight subcommands.
+     Mostly limited to handling input and output from w_assign.
+     '''
+
+     def __init__(self, parent):
+         super().__init__(parent)
+
+         self.data_reader = WESTDataReader()
+         self.iter_range = IterRangeSelection()
+         self.progress = ProgressIndicatorComponent()
+
+         self.output_filename = None
+         # This is actually applicable to both.
+         self.assignment_filename = None
+
+         self.output_file = None
+         self.assignments_file = None
+
+         self.evolution_mode = None
+
+         self.mcbs_alpha = None
+         self.mcbs_acalpha = None
+         self.mcbs_nsets = None
+
+         # Now we're adding in things that come from the old w_kinetics
+         self.do_compression = True
+
+     def add_args(self, parser):
+         self.progress.add_args(parser)
+         self.data_reader.add_args(parser)
+         self.iter_range.include_args['iter_step'] = True
+         self.iter_range.add_args(parser)
+
+         iogroup = parser.add_argument_group('input/output options')
+         iogroup.add_argument(
+             '-a',
+             '--assignments',
+             default='assign.h5',
+             help='''Bin assignments and macrostate definitions are in ASSIGNMENTS
+             (default: %(default)s).''',
+         )
+
+         iogroup.add_argument(
+             '-o',
+             '--output',
+             dest='output',
+             default=self.default_output_file,
+             help='''Store results in OUTPUT (default: %(default)s).''',
+         )
+
+     def process_args(self, args):
+         self.progress.process_args(args)
+         self.data_reader.process_args(args)
+         with self.data_reader:
+             self.iter_range.process_args(args, default_iter_step=None)
+         if self.iter_range.iter_step is None:
+             # use about 10 blocks by default
+             self.iter_range.iter_step = max(1, (self.iter_range.iter_stop - self.iter_range.iter_start) // 10)
+
+         self.output_filename = args.output
+         self.assignments_filename = args.assignments
+
+
+ # This provides some convenience functions, modified from w_kinavg, to help with calculating evolution and averages for observables with the mclib library in a consistent manner.
+ # It's used in both w_direct and w_reweight.
+ class AverageCommands(WESTKineticsBase):
+     default_output_file = 'direct.h5'
+
+     def __init__(self, parent):
+         # Ideally, this is stuff general to all the calculations we want to perform.
+         super().__init__(parent)
+         self.kinetics_filename = None
+         self.kinetics_file = None
+
+     def add_args(self, parser):
+         iogroup = parser.add_argument_group('input/output options')
+         # self.default_kinetics_file will be picked up as a class attribute from the appropriate subclass
+         # We can do this with the output file, too...
+         # ... by default, however, we're going to use {direct/reweight}.h5 for everything.
+         # Modules which are called with different default values will, of course, still use those.
+         iogroup.add_argument(
+             '-k',
+             '--kinetics',
+             default=self.default_kinetics_file,
+             help='''Populations and transition rates are stored in KINETICS
+             (default: %(default)s).''',
+         )
+
+         cgroup = parser.add_argument_group('confidence interval calculation options')
+         cgroup.add_argument(
+             '--disable-bootstrap',
+             '-db',
+             dest='bootstrap',
+             action='store_const',
+             const=False,
+             help='''Disable the use of Monte Carlo Block Bootstrapping.''',
+         )
+         cgroup.add_argument(
+             '--disable-correl',
+             '-dc',
+             dest='correl',
+             action='store_const',
+             const=False,
+             help='''Disable the correlation analysis.''',
+         )
+         cgroup.add_argument(
+             '--alpha',
+             type=float,
+             default=0.05,
+             help='''Calculate a (1-ALPHA) confidence interval
+             (default: %(default)s)''',
+         )
+         cgroup.add_argument(
+             '--autocorrel-alpha',
+             type=float,
+             dest='acalpha',
+             metavar='ACALPHA',
+             help='''Evaluate autocorrelation to (1-ACALPHA) significance.
+             Note that too small an ACALPHA will result in failure to detect autocorrelation
+             in a noisy flux signal. (Default: same as ALPHA.)''',
+         )
+         cgroup.add_argument('--nsets', type=int, help='''Use NSETS samples for bootstrapping (default: chosen based on ALPHA)''')
+
+         cogroup = parser.add_argument_group('calculation options')
+         cogroup.add_argument(
+             '-e',
+             '--evolution-mode',
+             choices=['cumulative', 'blocked', 'none'],
+             default='none',
+             help='''How to calculate time evolution of rate estimates.
+             ``cumulative`` evaluates rates over windows starting with --first-iter and getting progressively
+             wider to --last-iter by steps of --step-iter.
+             ``blocked`` evaluates rates over windows of width --step-iter, the first of which begins at
+             --first-iter.
+             ``none`` (the default) disables calculation of the time evolution of rate estimates.''',
+         )
+         cogroup.add_argument(
+             '--window-frac',
+             type=float,
+             default=1.0,
+             help='''Fraction of iterations to use in each window when running in ``cumulative`` mode.
+             The (1 - frac) fraction of iterations will be discarded from the start of each window.''',
+         )
+
+         mgroup = parser.add_argument_group('misc options')
+         mgroup.add_argument(
+             '--disable-averages',
+             '-da',
+             dest='display_averages',
+             action='store_false',
+             help='''Whether or not the averages should be printed to the console (set to FALSE if flag is used).''',
+         )
+
+     def process_args(self, args):
+         self.kinetics_filename = args.kinetics
+
+         # Disable the bootstrap or the correlation analysis.
+         self.mcbs_enable = args.bootstrap if args.bootstrap is not None else True
+         self.do_correl = args.correl if args.correl is not None else True
+         self.mcbs_alpha = args.alpha
+         self.mcbs_acalpha = args.acalpha if args.acalpha else self.mcbs_alpha
+         self.mcbs_nsets = args.nsets if args.nsets else mclib.get_bssize(self.mcbs_alpha)
+
+         self.display_averages = args.display_averages
+
+         self.evolution_mode = args.evolution_mode
+         self.evol_window_frac = args.window_frac
+         if self.evol_window_frac <= 0 or self.evol_window_frac > 1:
+             raise ValueError('Parameter error -- fractional window defined by --window-frac must be in (0,1]')
+
+     def stamp_mcbs_info(self, dataset):
+         dataset.attrs['mcbs_alpha'] = self.mcbs_alpha
+         dataset.attrs['mcbs_acalpha'] = self.mcbs_acalpha
+         dataset.attrs['mcbs_nsets'] = self.mcbs_nsets
+
+     def open_files(self):
+         self.output_file = h5io.WESTPAH5File(self.output_filename, 'a', creating_program=True)
+         h5io.stamp_creator_data(self.output_file)
+         self.assignments_file = h5io.WESTPAH5File(self.assignments_filename, 'r')  # , driver='core', backing_store=False)
+         self.kinetics_file = h5io.WESTPAH5File(self.kinetics_filename, 'r')  # , driver='core', backing_store=False)
+         if not self.iter_range.check_data_iter_range_least(self.assignments_file):
+             raise ValueError('assignments data do not span the requested iterations')
+
+     def open_assignments(self):
+         # Actually, I should rename this, as we're not OPENING assignments.
+         # This seems to be stuff we're going to be using a lot, so.
+         self.nstates = self.assignments_file.attrs['nstates']
+         self.nbins = self.assignments_file.attrs['nbins']
+         self.state_labels = self.assignments_file['state_labels'][...]
+         assert self.nstates == len(self.state_labels)
+         self.start_iter, self.stop_iter, self.step_iter = (
+             self.iter_range.iter_start,
+             self.iter_range.iter_stop,
+             self.iter_range.iter_step,
+         )
+
+         # Import for the reweighting code.
+
+         # state_map = self.assignments_file['state_map'][...]
+
+         # We've moved this into a different step so that it's compatible with
+         # loading up from the all command.
+         # Otherwise, we try to load the kinetics (since we're just mixing subclasses)
+         # before it's actually run, and so we fail out.
+         if not self.iter_range.check_data_iter_range_least(self.kinetics_file):
+             raise ValueError('kinetics data do not span the requested iterations')
+
+     def print_averages(self, dataset, header, dim=1):
+         print(header)
+         maxlabellen = max(list(map(len, self.state_labels)))
+         for istate in range(self.nstates):
+             if dim == 1:
+                 print(
+                     '{:{maxlabellen}s}: mean={:21.15e} CI=({:21.15e}, {:21.15e}) * tau^-1'.format(
+                         self.state_labels[istate],
+                         dataset['expected'][istate],
+                         dataset['ci_lbound'][istate],
+                         dataset['ci_ubound'][istate],
+                         maxlabellen=maxlabellen,
+                     )
+                 )
+
+             else:
+                 for jstate in range(self.nstates):
+                     if istate == jstate:
+                         continue
+                     print(
+                         '{:{maxlabellen}s} -> {:{maxlabellen}s}: mean={:21.15e} CI=({:21.15e}, {:21.15e}) * tau^-1'.format(
+                             self.state_labels[istate],
+                             self.state_labels[jstate],
+                             dataset['expected'][istate, jstate],
+                             dataset['ci_lbound'][istate, jstate],
+                             dataset['ci_ubound'][istate, jstate],
+                             maxlabellen=maxlabellen,
+                         )
+                     )
+
+     def run_calculation(
+         self, pi, nstates, start_iter, stop_iter, step_iter, dataset, eval_block, name, dim, do_averages=False, **extra
+     ):
+         # We want to use the same codepath to run a quick average as we do the longer evolution sets, so...
+         if do_averages:
+             start_pts = [start_iter, stop_iter]
+         else:
+             start_pts = list(range(start_iter, stop_iter, step_iter))
+         # Our evolution dataset!
+         if dim == 2:
+             evolution_dataset = np.zeros((len(start_pts), nstates, nstates), dtype=ci_dtype)
+         elif dim == 1:
+             evolution_dataset = np.zeros((len(start_pts), nstates), dtype=ci_dtype)
+         else:
+             # Temp.
+             print("What's wrong?")
+
+         # This is appropriate for bootstrapped quantities, I think.
+
+         if True:
+             futures = []
+             for iblock, start in enumerate(start_pts):
+                 stop = min(start + step_iter, stop_iter)
+                 if self.evolution_mode == 'cumulative' or do_averages is True:
+                     windowsize = int(self.evol_window_frac * (stop - start_iter))
+                     block_start = max(start_iter, stop - windowsize)
+                 else:  # self.evolution_mode == 'blocked'
+                     block_start = start
+
+                 # Create a basic set of kwargs for this iteration slice.
+                 future_kwargs = dict(
+                     iblock=iblock,
+                     start=block_start,
+                     stop=stop,
+                     nstates=nstates,
+                     mcbs_alpha=self.mcbs_alpha,
+                     mcbs_nsets=self.mcbs_nsets,
+                     mcbs_acalpha=self.mcbs_acalpha,
+                     do_correl=self.do_correl,
+                     name=name,
+                     mcbs_enable=self.mcbs_enable,
+                     data_input={},
+                     **extra
+                 )
+
+                 # Slice up the datasets for this iteration slice.
+                 # We're assuming they're all h5io iter blocked datasets; it's up to the calling routine
+                 # to ensure this is true.
+
+                 # Actually, I'm less sure how to handle this for pre-calculated datasets. Need to consider this. But for now...
+                 for key, value in dataset.items():
+                     try:
+                         future_kwargs['data_input'][key] = (
+                             value.iter_slice(block_start, stop) if hasattr(value, 'iter_slice') else value[block_start:stop]
+                         )
+                     except Exception:
+                         future_kwargs['data_input'][key] = (
+                             value.iter_slice(block_start, stop) if hasattr(value, 'iter_slice') else value[block_start:stop, :]
+                         )
+                     # print(future_kwargs['data_input'][key])
+
+                 # We create a future object with the appropriate name, and then append it to the work manager.
+                 futures.append(generate_future(self.work_manager, name, eval_block, future_kwargs))
+
+             pi.new_operation('Calculating {}'.format(name), len(futures))
+
+             # Now, we wait to get the result back; we'll store it in the result, and return it.
+             for future in self.work_manager.as_completed(futures):
+                 pi.progress = iblock / step_iter
+                 future_result = future.get_result(discard=True)
+
+                 if dim == 2:
+                     for result in future_result:
+                         name, iblock, istate, jstate, ci_result = result
+                         evolution_dataset[iblock, istate, jstate] = ci_result
+                 elif dim == 1:
+                     for result in future_result:
+                         name, iblock, istate, ci_result = result
+                         evolution_dataset[iblock, istate] = ci_result
+
+         return evolution_dataset
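
The window bookkeeping inside run_calculation is what gives the --evolution-mode choices their meaning. A small standalone sketch follows (not shipped in this wheel; the helper name evolution_windows and the example numbers are arbitrary), reproducing the (block_start, stop) pairs that the loop above computes for the blocked and cumulative modes:

def evolution_windows(start_iter, stop_iter, step_iter, mode, window_frac=1.0):
    # Reproduce the per-block window bounds from AverageCommands.run_calculation.
    windows = []
    for start in range(start_iter, stop_iter, step_iter):
        stop = min(start + step_iter, stop_iter)
        if mode == 'cumulative':
            # Window grows from start_iter toward stop, trimmed to the most
            # recent window_frac fraction of the iterations seen so far.
            windowsize = int(window_frac * (stop - start_iter))
            block_start = max(start_iter, stop - windowsize)
        else:  # 'blocked'
            block_start = start
        windows.append((block_start, stop))
    return windows

# Iterations [1, 101) in steps of 25:
print(evolution_windows(1, 101, 25, 'blocked'))
# [(1, 26), (26, 51), (51, 76), (76, 101)]
print(evolution_windows(1, 101, 25, 'cumulative'))
# [(1, 26), (1, 51), (1, 76), (1, 101)]
print(evolution_windows(1, 101, 25, 'cumulative', window_frac=0.5))
# [(14, 26), (26, 51), (39, 76), (51, 101)]

With --window-frac below 1.0, each cumulative window keeps only the most recent fraction of the iterations accumulated so far, which matches the --window-frac help text above.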