asyncmd 0.3.3__py3-none-any.whl → 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
asyncmd/trajectory/trajectory_cache.py ADDED
@@ -0,0 +1,365 @@
1
+ # This file is part of asyncmd.
2
+ #
3
+ # asyncmd is free software: you can redistribute it and/or modify
4
+ # it under the terms of the GNU General Public License as published by
5
+ # the Free Software Foundation, either version 3 of the License, or
6
+ # (at your option) any later version.
7
+ #
8
+ # asyncmd is distributed in the hope that it will be useful,
9
+ # but WITHOUT ANY WARRANTY; without even the implied warranty of
10
+ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11
+ # GNU General Public License for more details.
12
+ #
13
+ # You should have received a copy of the GNU General Public License
14
+ # along with asyncmd. If not, see <https://www.gnu.org/licenses/>.
15
+ """
16
+ This module contains the implementations of the TrajectoryFunctionValueCache classes.
17
+
18
+ They are used in the asyncmd.Trajectory object to enable caching of CV values.
19
+ """
20
+ import io
21
+ import os
22
+ import abc
23
+ import logging
24
+ import zipfile
25
+ import collections
26
+
27
+ import numpy as np
28
+
29
+
30
+ from .._config import _GLOBALS
31
+
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ class ValuesAlreadyStoredError(ValueError):
37
+ """
38
+ Error raised by :class:`TrajectoryFunctionValueCache` classes when trying to
39
+ append values for a func_id that is already present.
40
+ """
41
+
42
+
43
+ class TrajectoryFunctionValueCache(collections.abc.Mapping):
44
+ """
45
+ Abstract base class defining the interface for TrajectoryFunctionValueCaches.
46
+
47
+ Note: We assume that stored CV values are immutable (except for adding
48
+ additional stored function values), since they are tied to the trajectory
49
+ (hash) and the func_id of the wrapped function (which is unique and includes
50
+ code and call_kwargs). I.e. as long as both the underlying trajectory and
51
+ the func_id of the cached function stay the same, the cached values are
52
+ current.
53
+ We therefore get away with a ``Mapping`` instead of a ``MutableMapping`` and
54
+ only need the additional methods ``append`` and ``clear_all_values`` instead
55
+ of generic setters.
56
+ """
57
+ def __init__(self, traj_hash: int, traj_files: list[str]) -> None:
58
+ """
59
+ Initialize a ``TrajectoryFunctionValueCache``.
60
+
61
+ Parameters
62
+ ----------
63
+ traj_hash : int
64
+ The hash of the associated ``asyncmd.Trajectory``.
65
+ traj_files : list[str]
66
+ The filenames of the associated trajectory files.
67
+ """
68
+ self._traj_hash = traj_hash
69
+ self._traj_files = traj_files
70
+
71
+ @abc.abstractmethod
72
+ def append(self, func_id: str, values: np.ndarray) -> None:
73
+ """
74
+ Append given ``values`` for ``func_id``.
75
+
76
+ Parameters
77
+ ----------
78
+ func_id : str
79
+ The ID of the function the values belong to.
80
+ values : np.ndarray
81
+ The function values.
82
+ """
83
+ raise NotImplementedError
84
+
85
+ @abc.abstractmethod
86
+ def clear_all_values(self) -> None:
87
+ """
88
+ Clear all cached function values from cache, i.e. empty the cache.
89
+ """
90
+ raise NotImplementedError
91
+
92
+
93
+ class TrajectoryFunctionValueCacheInMemory(TrajectoryFunctionValueCache):
94
+ """
95
+ Interface for caching trajectory function values in memory using a dict.
96
+ """
97
+ def __init__(self, traj_hash: int, traj_files: list[str]) -> None:
98
+ """
99
+ Initialize a ``TrajectoryFunctionValueCacheInMemory``.
100
+
101
+ Parameters
102
+ ----------
103
+ traj_hash : int
104
+ The hash of the associated ``asyncmd.Trajectory``.
105
+ traj_files : list[str]
106
+ The filenames of the associated trajectory files.
107
+ """
108
+ super().__init__(traj_hash, traj_files)
109
+ self._func_values_by_id: dict[str, np.ndarray] = {}
110
+
111
+ def __len__(self) -> int:
112
+ return len(self._func_values_by_id)
113
+
114
+ def __iter__(self) -> collections.abc.Generator[str]:
115
+ yield from self._func_values_by_id
116
+
117
+ def __getitem__(self, key: str) -> np.ndarray:
118
+ return self._func_values_by_id[key]
119
+
120
+ def append(self, func_id: str, values: np.ndarray) -> None:
121
+ if func_id in self._func_values_by_id:
122
+ # first check if it is already in there
123
+ raise ValuesAlreadyStoredError(
124
+ "There are already values stored for func_id "
125
+ f"{func_id}. Changing the stored values is not "
126
+ "supported.")
127
+ self._func_values_by_id[func_id] = values
128
+
129
+ def clear_all_values(self) -> None:
130
+ self._func_values_by_id = {}
131
+
132
+
133
+ class TrajectoryFunctionValueCacheInNPZ(TrajectoryFunctionValueCache):
134
+ """
135
+ Interface for caching trajectory function values in a numpy npz file.
136
+
137
+ Will use one separate npz file for each Trajectory object; the file is
139
+ placed in the filesystem right next to the underlying trajectory file.
140
+ The name of the npz file is derived from the trajectory file name.
141
+ Additionally, the npz file stores the ``traj_hash``; if the ``traj_hash``
142
+ changes, i.e. the trajectory behind the (unchanged) npz filename changes,
143
+ the npz file and all cached values for this trajectory filename are removed.
143
+ """
144
+
145
+ # NOTE: npz appending inspired by: https://stackoverflow.com/a/66618141
146
+
147
+ # NOTE: It would be nice to use the MAX_FILES_OPEN semaphore
148
+ # but then we need async/await and then we need to go to a 'create'
149
+ # classmethod that is async and required for initialization
150
+ # (because __init__ can't be async).
151
+ # But since we (have to) open the npz file in the other magic methods
152
+ # too, it does not really matter (as they can not be async either)?
153
+ # And as we also leave some room for non-semaphored file openings anyway...
154
+
155
+ _TRAJ_HASH_NPZ_KEY = "hash_of_trajs" # key of traj_hash in npz file
156
+
157
+ def __init__(self, traj_hash: int, traj_files: list[str]) -> None:
158
+ super().__init__(traj_hash=traj_hash, traj_files=traj_files)
159
+ self.fname_npz = self.get_cache_filename(traj_files=traj_files)
160
+ self._func_ids: list[str] = []
161
+ # sort out if we have an associated npz file already
162
+ # and if it is from/for the "right" trajectory file
163
+ self._ensure_consistent_npz()
164
+
165
+ def _ensure_consistent_npz(self) -> None:
166
+ # next line makes sure we only remember func_ids from the current npz
167
+ self._func_ids = []
168
+ if not os.path.isfile(self.fname_npz):
169
+ # no npz so nothing to do except making sure we have no func_ids
170
+ return
171
+ existing_npz_matches = False
172
+ with np.load(self.fname_npz, allow_pickle=False) as npzfile:
173
+ try:
174
+ # it is an array with 1 element, but pylint does not know that
175
+ # pylint: disable-next=unsubscriptable-object
176
+ saved_hash_traj = npzfile[self._TRAJ_HASH_NPZ_KEY][0]
177
+ except KeyError:
178
+ # we probably tripped over an old-format npz,
179
+ # so we will just rewrite it completely (now including the hash)
180
+ pass
181
+ else:
182
+ # old hash found, let's compare the two hashes
183
+ if (existing_npz_matches := self._traj_hash == saved_hash_traj):
184
+ # if they match, populate self with the func_ids we have
185
+ # cached values for
186
+ for k in npzfile.keys():
187
+ if k != self._TRAJ_HASH_NPZ_KEY:
188
+ self._func_ids.append(str(k))
189
+ # now if the old npz did not match we should remove it
190
+ # then we will rewrite it with the first cached CV values
191
+ if not existing_npz_matches:
192
+ logger.debug("Found existing npz file (%s) but the "
193
+ "trajectory hash does not match. "
194
+ "Recreating the npz cache from scratch.",
195
+ self.fname_npz
196
+ )
197
+ os.unlink(self.fname_npz)
198
+
199
+ @classmethod
200
+ def get_cache_filename(cls, traj_files: list[str]) -> str:
201
+ """
202
+ Construct the cache filename from the trajectory filename(s).
203
+
204
+ Parameters
205
+ ----------
206
+ traj_files : list[str]
207
+ Paths to the trajectory files for which we cache values.
208
+
209
+ Returns
210
+ -------
211
+ str
212
+ Path to the cache file associated with the trajectory.
213
+ """
214
+ head, tail = os.path.split(traj_files[0])
215
+ return os.path.join(
216
+ head,
217
+ f".{tail}{'_MULTIPART' if len(traj_files) > 1 else ''}_asyncmd_cv_cache.npz"
218
+ )
219
+
220
+ def __len__(self) -> int:
221
+ return len(self._func_ids)
222
+
223
+ def __iter__(self) -> collections.abc.Generator[str]:
224
+ yield from self._func_ids
225
+
226
+ def __getitem__(self, key: str) -> np.ndarray:
227
+ if key in self._func_ids:
228
+ with np.load(self.fname_npz, allow_pickle=False) as npzfile:
229
+ return npzfile[key]
230
+ # Key not found/ no values stored for key
231
+ raise KeyError(f"No values for {key} cached (yet).")
232
+
233
+ def append(self, func_id: str, values: np.ndarray) -> None:
234
+ """
235
+ Append values for given func_id.
236
+
237
+ Parameters
238
+ ----------
239
+ func_id : str
240
+ Function identifier.
241
+ values : np.ndarray
242
+ The values obtained by applying the function with the given func_id.
243
+
244
+ Raises
245
+ ------
246
+ ValuesAlreadyStoredError
247
+ If there are already values stored for ``func_id`` in self
248
+ (a ``ValueError`` subclass; changing the stored values is not
249
+ supported).
250
+ """
251
+ if func_id in self._func_ids:
252
+ # first check if it is already in there
253
+ raise ValuesAlreadyStoredError(
254
+ "There are already values stored for func_id "
255
+ f"{func_id}. Changing the stored values is not "
256
+ "supported.")
257
+ if not self._func_ids:
258
+ # these are the first cached CV values for this traj
259
+ # so we just create the (empty) npz file
260
+ np.savez(self.fname_npz)
261
+ # and write the trajectory hash
262
+ self._append_data_to_npz(name=self._TRAJ_HASH_NPZ_KEY,
263
+ value=np.array([self._traj_hash]),
264
+ )
265
+ # now we can append either way
266
+ # either already something cached, or freshly created empty file
267
+ self._append_data_to_npz(name=func_id, value=values)
268
+ # add func_id to list of func_ids that we know are cached in npz
269
+ self._func_ids.append(func_id)
270
+
271
+ def _append_data_to_npz(self, name: str, value: np.ndarray) -> None:
272
+ # npz files are just zip archives of npy files,
273
+ # so we save an npy file into a BytesIO and then append that
274
+ # to the end of the npz file
275
+ bio = io.BytesIO()
276
+ np.save(bio, value)
277
+ with zipfile.ZipFile(file=self.fname_npz,
278
+ mode="a", # append!
279
+ # uncompressed (but) zip archive member
280
+ compression=zipfile.ZIP_STORED,
281
+ ) as zfile:
282
+ zfile.writestr(f"{name}.npy", data=bio.getvalue())
283
+
284
+ def clear_all_values(self) -> None:
285
+ self._func_ids = [] # clear internal storage of func_ids
286
+ if os.path.isfile(self.fname_npz):
287
+ os.unlink(self.fname_npz) # and remove the file if it exists
288
+
289
+
290
+ class TrajectoryFunctionValueCacheInH5PY(TrajectoryFunctionValueCache):
291
+ """
292
+ Interface for caching trajectory function values in a given h5py group.
293
+
294
+ Gets the centrally set ``H5PY_CACHE`` configuration variable in ``__init__``
295
+ and uses it as ``h5py.Group`` to store the cached values in.
296
+ The values will be stored in this group in a subgroup (defined by
297
+ ``self._H5PY_PATHS['prefix']``) and then by trajectory hash, i.e. the full path
298
+ becomes '$self._H5PY_PATHS['prefix']/$TRAJ_HASH/'.
299
+ Within this path/group there are two subgroups, one for the func_ids and one
300
+ for the cached values (named by ``self._H5PY_PATHS['ids']`` and ``['vals']``).
301
+ Inside these groups datasets are named with an index, and the index is used
302
+ to associate func_ids and cached values, i.e. values with index i correspond
303
+ to the func_id with index i.
304
+ """
305
+
306
+ # mapping of shorthands to h5py-paths/ group names used by this class
307
+ _H5PY_PATHS = {"ids": "FunctionIDs",
308
+ "vals": "FunctionValues",
309
+ "prefix": "asyncmd/TrajectoryFunctionValueCache",
310
+ }
311
+
312
+ def __init__(self, traj_hash: int, traj_files: list[str]) -> None:
313
+ super().__init__(traj_hash, traj_files)
314
+ try:
315
+ self._h5py_cache = _GLOBALS["H5PY_CACHE"]
316
+ except KeyError as e:
317
+ raise RuntimeError(
318
+ f"Can not initialize a {type(self)} without global h5py cache set!"
319
+ " Try calling `asyncmd.config.register_h5py_cache` first."
320
+ ) from e
321
+
322
+ self._root_grp = self._h5py_cache.require_group(
323
+ f"{self._H5PY_PATHS['prefix']}/{self._traj_hash}"
324
+ )
325
+ self._ids_grp = self._root_grp.require_group(self._H5PY_PATHS["ids"])
326
+ self._vals_grp = self._root_grp.require_group(self._H5PY_PATHS["vals"])
327
+ # keep a list of func_ids we have cached values for in memory
328
+ # NOTE: remember to add func_ids we cache values for here also!
329
+ self._func_ids: list[str] = [self._ids_grp[str(idx)].asstr()[()]
330
+ for idx in range(len(self._ids_grp.keys()))
331
+ ]
332
+
333
+ def __len__(self) -> int:
334
+ return len(self._func_ids)
335
+
336
+ def __iter__(self) -> collections.abc.Generator[str]:
337
+ yield from self._func_ids
338
+
339
+ def __getitem__(self, key: str) -> np.ndarray:
340
+ if key in self._func_ids:
341
+ idx = self._func_ids.index(key)
342
+ return self._vals_grp[str(idx)][:]
343
+ # if we get here, the key is not in there
344
+ raise KeyError(f"Key not found (key={key}).")
345
+
346
+ def append(self, func_id: str, values: np.ndarray) -> None:
347
+ if func_id in self:
348
+ raise ValuesAlreadyStoredError(
349
+ "There are already values stored for func_id "
350
+ f"{func_id}. Changing the stored values is not "
351
+ "supported.")
352
+ name = str(len(self))
353
+ _ = self._ids_grp.create_dataset(name, data=func_id)
354
+ _ = self._vals_grp.create_dataset(name, data=values)
355
+ # append the func_id of the newly stored values to the internal in-memory state
356
+ self._func_ids.append(func_id)
357
+
358
+ def clear_all_values(self) -> None:
359
+ # delete and recreate the id and values h5py subgroups
360
+ del self._root_grp[self._H5PY_PATHS["ids"]]
361
+ del self._root_grp[self._H5PY_PATHS["vals"]]
362
+ self._ids_grp = self._root_grp.require_group(self._H5PY_PATHS["ids"])
363
+ self._vals_grp = self._root_grp.require_group(self._H5PY_PATHS["vals"])
364
+ # and empty the in-memory func-id list
365
+ self._func_ids = []
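The three cache classes added above share a read-only `Mapping` interface plus `append` and `clear_all_values`. Below is a minimal sketch of how the npz-backed cache behaves, based only on the code in this diff; the import path follows the `asyncmd/trajectory/trajectory_cache.py` entry in the RECORD further down, and the `traj_hash`, trajectory filename, and `func_id` are made-up placeholders (in real use they come from an `asyncmd.Trajectory` and a wrapped CV function).

```python
# Minimal sketch of the cache interface defined above -- not part of the package.
# Import path taken from the RECORD below; hash, filename and func_id are placeholders.
import numpy as np

from asyncmd.trajectory.trajectory_cache import (
    TrajectoryFunctionValueCacheInNPZ,
    ValuesAlreadyStoredError,
)

# The npz cache derives a hidden cache file next to the (first) trajectory file,
# here ".traj_part1.xtc_asyncmd_cv_cache.npz" in the working directory.
cache = TrajectoryFunctionValueCacheInNPZ(traj_hash=1234,
                                          traj_files=["traj_part1.xtc"])
print(cache.fname_npz)

# append values for a func_id and read them back through the Mapping interface
cache.append("my_cv_func_id", np.arange(5, dtype=float))
assert "my_cv_func_id" in cache and len(cache) == 1
print(cache["my_cv_func_id"])

# cached values are treated as immutable: a second append for the same func_id raises
try:
    cache.append("my_cv_func_id", np.zeros(5))
except ValuesAlreadyStoredError:
    print("values for this func_id are already cached")

# clear_all_values empties the cache (for the npz cache it removes the file again)
cache.clear_all_values()
```

Running this creates the hidden npz cache file in the working directory and removes it again in the final `clear_all_values()` call; the in-memory and h5py caches expose the same interface.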
asyncmd/utils.py CHANGED
@@ -12,6 +12,14 @@
12
12
  #
13
13
  # You should have received a copy of the GNU General Public License
14
14
  # along with asyncmd. If not, see <https://www.gnu.org/licenses/>.
15
+ """
16
+ This module implements commonly used utility functions for MD usage with asyncmd.
17
+
18
+ This includes various functions to get all trajectory (or other) files related
19
+ to a given engine, naming scheme, and/or file-ending.
20
+ It also includes various functions to retrieve or ensure important parameters from
21
+ MDConfig/MDEngine combinations, such as nstout_from_mdconfig and ensure_mdconfig_options.
22
+ """
15
23
  from .mdengine import MDEngine
16
24
  from .mdconfig import MDConfig
17
25
  from .trajectory.trajectory import Trajectory
@@ -20,7 +28,7 @@ from .gromacs import mdengine as gmx_engine
20
28
  from .gromacs import mdconfig as gmx_config
21
29
 
22
30
 
23
- async def get_all_traj_parts(folder: str, deffnm: str, engine: MDEngine) -> "list[Trajectory]":
31
+ async def get_all_traj_parts(folder: str, deffnm: str, engine: MDEngine) -> list[Trajectory]:
24
32
  """
25
33
  List all trajectories in folder by given engine class with given deffnm.
26
34
 
@@ -48,13 +56,12 @@ async def get_all_traj_parts(folder: str, deffnm: str, engine: MDEngine) -> "lis
48
56
  return await gmx_utils.get_all_traj_parts(folder=folder, deffnm=deffnm,
49
57
  traj_type=engine.output_traj_type,
50
58
  )
51
- else:
52
- raise ValueError(f"Engine {engine} is not a known MDEngine class."
53
- + " Maybe someone just forgot to add the function?")
59
+ raise ValueError(f"Engine {engine} is not a known MDEngine class."
60
+ + " Maybe someone just forgot to add the function?")
54
61
 
55
62
 
56
63
  async def get_all_file_parts(folder: str, deffnm: str, file_ending: str,
57
- ) -> "list[str]":
64
+ ) -> list[str]:
58
65
  """
59
66
  Find and return all files with given ending produced by a `MDEngine`.
60
67
 
@@ -67,7 +74,7 @@ async def get_all_file_parts(folder: str, deffnm: str, file_ending: str,
67
74
  deffnm : str
68
75
  deffnm (prefix of filenames) used in the simulation.
69
76
  file_ending : str
70
- File ending of the requested filetype (with or without preceeding ".").
77
+ File ending of the requested filetype (with or without preceding ".").
71
78
 
72
79
  Returns
73
80
  -------
@@ -105,9 +112,8 @@ def nstout_from_mdconfig(mdconfig: MDConfig, output_traj_type: str) -> int:
105
112
  return gmx_utils.nstout_from_mdp(mdp=mdconfig,
106
113
  traj_type=output_traj_type,
107
114
  )
108
- else:
109
- raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
110
- + " Maybe someone just forgot to add the function?")
115
+ raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
116
+ + " Maybe someone just forgot to add the function?")
111
117
 
112
118
 
113
119
  def ensure_mdconfig_options(mdconfig: MDConfig, genvel: str = "no",
@@ -136,13 +142,12 @@ def ensure_mdconfig_options(mdconfig: MDConfig, genvel: str = "no",
136
142
  Raises
137
143
  ------
138
144
  ValueError
139
- If the MDConfig belongs to an unknown subclass not dispatcheable to any
145
+ If the MDConfig belongs to an unknown subclass not dispatchable to any
140
146
  specific engine submodule.
141
147
  """
142
148
  if isinstance(mdconfig, gmx_config.MDP):
143
149
  return gmx_utils.ensure_mdp_options(mdp=mdconfig, genvel=genvel,
144
150
  continuation=continuation,
145
151
  )
146
- else:
147
- raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
148
- + " Maybe someone just forgot to add the function?")
152
+ raise ValueError(f"mdconfig {mdconfig} is not a known MDConfig class."
153
+ + " Maybe someone just forgot to add the function?")
asyncmd-0.4.0.dist-info/METADATA ADDED
@@ -0,0 +1,90 @@
1
+ Metadata-Version: 2.4
2
+ Name: asyncmd
3
+ Version: 0.4.0
4
+ Summary: asyncmd is a library to write concurrent code to run and analyze molecular dynamics simulations using Python's async/await syntax.
5
+ Author-email: Hendrik Jung <hendrik.jung@biophys.mpg.de>
6
+ Maintainer-email: Hendrik Jung <hendrik.jung@biophys.mpg.de>
7
+ Project-URL: Documentation, https://asyncmd.readthedocs.io/en/latest/
8
+ Project-URL: Repository, https://github.com/bio-phys/asyncmd.git
9
+ Project-URL: Issues, https://github.com/bio-phys/asyncmd/issues
10
+ Keywords: molecular dynamics,molecular-dynamics,MD,high performance computing,HPC,slurm,SLURM,gromacs,GROMACS
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Intended Audience :: Science/Research
13
+ Classifier: Natural Language :: English
14
+ Classifier: License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)
15
+ Classifier: Operating System :: OS Independent
16
+ Classifier: Programming Language :: Python
17
+ Classifier: Programming Language :: Python :: 3
18
+ Classifier: Topic :: Scientific/Engineering
19
+ Classifier: Topic :: Scientific/Engineering :: Chemistry
20
+ Classifier: Topic :: Scientific/Engineering :: Physics
21
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
22
+ Requires-Python: >=3.10
23
+ Description-Content-Type: text/markdown
24
+ License-File: LICENSE
25
+ Requires-Dist: aiofiles
26
+ Requires-Dist: mdanalysis
27
+ Requires-Dist: numpy
28
+ Requires-Dist: scipy
29
+ Provides-Extra: docs
30
+ Requires-Dist: sphinx; extra == "docs"
31
+ Requires-Dist: myst-nb; extra == "docs"
32
+ Requires-Dist: sphinx-book-theme; extra == "docs"
33
+ Provides-Extra: tests
34
+ Requires-Dist: pytest; extra == "tests"
35
+ Requires-Dist: pytest-asyncio; extra == "tests"
36
+ Provides-Extra: tests-all
37
+ Requires-Dist: asyncmd[tests]; extra == "tests-all"
38
+ Requires-Dist: h5py; extra == "tests-all"
39
+ Requires-Dist: coverage; extra == "tests-all"
40
+ Requires-Dist: pytest-cov; extra == "tests-all"
41
+ Provides-Extra: dev
42
+ Requires-Dist: asyncmd[docs,tests-all]; extra == "dev"
43
+ Requires-Dist: jupyterlab; extra == "dev"
44
+ Requires-Dist: ipywidgets; extra == "dev"
45
+ Requires-Dist: tqdm; extra == "dev"
46
+ Requires-Dist: pylint; extra == "dev"
47
+ Dynamic: license-file
48
+
49
+ # asyncmd
50
+
51
+ [![codecov][codecov-badge]][codecov-link] [![Documentation Status][rtd-badge]][rtd-link] [![PyPI][pypi-badge]][pypi-link]
52
+
53
+ asyncmd is a library to write **concurrent** code to run and analyze molecular dynamics simulations using Python's **async/await** syntax.
54
+ Computationally costly operations can be performed locally or submitted to a queuing system.
55
+
56
+ asyncmd enables users to construct complex molecular dynamics (MD) workflows or develop and implement trajectory based enhanced sampling methods with the following key features:
57
+
58
+ - flexible, programmatic and parallel setup, control, and analysis of an arbitrary number of MD simulations
59
+ - dictionary-like interface to the MD parameters
60
+ - parallelized application of user defined (python) functions on trajectories (including the automatic caching of calculated values)
61
+ - propagation of MD until any or all user-supplied conditions are fulfilled on the trajectory
62
+ - extract molecular configurations from trajectories to (re)start an arbitrary number of MD simulations from them
63
+
64
+ ## Installation
65
+
66
+ The following command will install asyncmd from [PyPI][pypi-link]:
67
+
68
+ ```bash
69
+ pip install asyncmd
70
+ ```
71
+
72
+ ## Documentation
73
+
74
+ See the [asyncmd documentation][rtd-link] for more information.
75
+
76
+ ## Contributing
77
+
78
+ All contributions are appreciated! Please refer to the [documentation][rtd-link] for information.
79
+
80
+ ---
81
+ <sub>This README.md is printed from 100% recycled electrons.</sub>
82
+
83
+ [codecov-link]: https://app.codecov.io/gh/bio-phys/asyncmd
84
+ [codecov-badge]: https://img.shields.io/codecov/c/github/bio-phys/asyncmd
85
+
86
+ [rtd-link]: https://asyncmd.readthedocs.io/en/latest/
87
+ [rtd-badge]: https://readthedocs.org/projects/asyncmd/badge/?version=latest
88
+
89
+ [pypi-link]: https://pypi.org/project/asyncmd/
90
+ [pypi-badge]: https://img.shields.io/pypi/v/asyncmd
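Since the METADATA above declares `Requires-Python: >=3.10` and the extras `docs`, `tests`, `tests-all`, and `dev`, here is a small sketch of how those fields surface at runtime via the standard library; it assumes the 0.4.0 wheel is installed in the current environment.

```python
# How the metadata fields above surface at runtime (assumes the 0.4.0 wheel is installed).
from importlib.metadata import metadata, requires, version

print(version("asyncmd"))                        # "0.4.0" for this wheel
print(metadata("asyncmd")["Requires-Python"])    # ">=3.10"
# runtime requirements plus the extras declared above (docs, tests, tests-all, dev)
for requirement in requires("asyncmd"):
    print(requirement)
```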
asyncmd-0.4.0.dist-info/RECORD ADDED
@@ -0,0 +1,24 @@
1
+ asyncmd/__init__.py,sha256=fRTzLO5-xSg8r5gFnSLMUgMRc6KmsTDYn1TkvCj1-Hs,975
2
+ asyncmd/_config.py,sha256=FpyhM720KhQDneQ5tikh1ou-pPsZdA0dexPygDqpNik,1451
3
+ asyncmd/_version.py,sha256=KB8fZfx9ELJ3ICfIucZiL8hJrJnLzn4V9qXPHZipQcU,2455
4
+ asyncmd/config.py,sha256=hV0RWSKpEJx4679QZoltQPQETpK1yPdHVFO3MqcEqgQ,9100
5
+ asyncmd/mdconfig.py,sha256=FVPwog7qo7Et_fg4Tqw1bzk5jthegsm5c3HNH6HOy5o,14346
6
+ asyncmd/mdengine.py,sha256=MG0Ef8AkaocrnvpyGyWo9pbO2kPlwNuswfT8zdp-PkI,6269
7
+ asyncmd/slurm.py,sha256=-got2WXTW1eSzlzfxcnlAICedn13CrheZuvYSnR8McQ,60063
8
+ asyncmd/tools.py,sha256=pfLyxVnzsgfJ2QJTqxsag5j8bg2ktDcHPFdLEMHbH8w,12617
9
+ asyncmd/utils.py,sha256=Ypf0u0KJjhQstSVwGEM73Ds4gQ9DVj2QHrWznpN6yLk,5673
10
+ asyncmd/gromacs/__init__.py,sha256=5d_Mtu7cvqEUeCSHhg_YzLDBRBWK24S851YwWPSIVao,813
11
+ asyncmd/gromacs/mdconfig.py,sha256=iDUCm2ZIcmK6nvDNdXmzOj_emkDHUqfgM-e1TScW0ZE,17981
12
+ asyncmd/gromacs/mdengine.py,sha256=ifwhO_SoXSVm3X47HKsQNBYaAeFj_lKEiGhapnbpY4I,54690
13
+ asyncmd/gromacs/utils.py,sha256=XEoDUUByXWdsBrE86JxqGLeMY5VOX1DRm75M-20r4J8,7425
14
+ asyncmd/trajectory/__init__.py,sha256=QHwGJfqPgfbKgGZB7VLZfOiy9cqr2V9gacUVVZK1KSs,1960
15
+ asyncmd/trajectory/convert.py,sha256=FgAHwphTdJBnaA_s_JYyAu1ljGZU7M-sH64pk_Gs1lo,26586
16
+ asyncmd/trajectory/functionwrapper.py,sha256=lGGindpSIDnk7H0WBqssvGFYuPE0yn8iHUIZv-n1UOE,27515
17
+ asyncmd/trajectory/propagate.py,sha256=rckMlwQH6LrBYbKAqt5vcvDbDKV8Jf4IzOJK93R45Bg,48444
18
+ asyncmd/trajectory/trajectory.py,sha256=RSeiFLQeYusYFy3jpsttBf0MP6d9J4K9M2wn1dB1orI,38201
19
+ asyncmd/trajectory/trajectory_cache.py,sha256=32Mh-Jv52TRMDSFiOux1Iny94EqSl8bkssRBQuZGNu4,15007
20
+ asyncmd-0.4.0.dist-info/licenses/LICENSE,sha256=tqi_Y64slbCqJW7ndGgNe9GPIfRX2nVGb3YQs7FqzE4,34670
21
+ asyncmd-0.4.0.dist-info/METADATA,sha256=TBm4NyRZ5qzvWQCnJKpwTdl_io5W9rRE1gkRMcdbhoc,3907
22
+ asyncmd-0.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
23
+ asyncmd-0.4.0.dist-info/top_level.txt,sha256=YG6cpLOyBjjelv7a8p2xYEHNVBgXSW8PvM8-9S9hMb8,8
24
+ asyncmd-0.4.0.dist-info/RECORD,,
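The `sha256=` entries in the RECORD above use the standard wheel format: the urlsafe base64 encoding of the raw SHA-256 digest with the `=` padding stripped. A small sketch to recompute such an entry for a file unpacked from the wheel; the example path is just the `asyncmd/utils.py` entry listed above.

```python
# Recompute a RECORD-style hash entry for a file unpacked from the wheel.
# Wheel RECORDs store "sha256=<urlsafe base64 of the digest, '=' padding stripped>".
import base64
import hashlib


def record_hash(path: str) -> str:
    with open(path, "rb") as f:
        digest = hashlib.sha256(f.read()).digest()
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


# e.g. record_hash("asyncmd/utils.py") should reproduce the entry listed above,
# "sha256=Ypf0u0KJjhQstSVwGEM73Ds4gQ9DVj2QHrWznpN6yLk", for the 0.4.0 wheel contents.
```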
asyncmd-0.4.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (80.3.1)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5