imdclient-0.1.3-py3-none-any.whl → imdclient-0.2.0b0-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (48)
  1. imdclient/IMDClient.py +43 -12
  2. imdclient/IMDProtocol.py +1 -0
  3. imdclient/__init__.py +0 -5
  4. imdclient/data/gromacs/md/gromacs_v3_nst1.mdp +3 -3
  5. imdclient/data/namd/md/namd3 +0 -0
  6. imdclient/data/namd/md/namd_v3_nst_1.namd +1 -1
  7. imdclient/tests/base.py +108 -83
  8. imdclient/tests/conftest.py +0 -39
  9. imdclient/tests/datafiles.py +16 -1
  10. imdclient/tests/docker_testing/docker.md +1 -1
  11. imdclient/tests/hpc_testing/gromacs/README.md +112 -0
  12. imdclient/tests/hpc_testing/gromacs/gmx_gpu_test.mdp +58 -0
  13. imdclient/tests/hpc_testing/gromacs/gmx_gpu_test.top +11764 -0
  14. imdclient/tests/hpc_testing/gromacs/struct.gro +21151 -0
  15. imdclient/tests/hpc_testing/gromacs/validate_gmx.sh +90 -0
  16. imdclient/tests/hpc_testing/lammps/README.md +62 -0
  17. imdclient/tests/hpc_testing/lammps/lammps_v3_nst_1.in +71 -0
  18. imdclient/tests/hpc_testing/lammps/topology_after_min.data +8022 -0
  19. imdclient/tests/hpc_testing/lammps/validate_lmp.sh +66 -0
  20. imdclient/tests/hpc_testing/namd/README.md +147 -0
  21. imdclient/tests/hpc_testing/namd/alanin.params +402 -0
  22. imdclient/tests/hpc_testing/namd/alanin.pdb +77 -0
  23. imdclient/tests/hpc_testing/namd/alanin.psf +206 -0
  24. imdclient/tests/hpc_testing/namd/namd_v3_nst_1.namd +59 -0
  25. imdclient/tests/hpc_testing/namd/validate_namd.sh +71 -0
  26. imdclient/tests/minimalreader.py +86 -0
  27. imdclient/tests/server.py +6 -14
  28. imdclient/tests/test_gromacs.py +15 -3
  29. imdclient/tests/test_imdclient.py +26 -7
  30. imdclient/tests/test_lammps.py +22 -19
  31. imdclient/tests/test_manual.py +224 -66
  32. imdclient/tests/test_namd.py +39 -16
  33. imdclient/tests/test_utils.py +31 -0
  34. imdclient/utils.py +50 -17
  35. {imdclient-0.1.3.dist-info → imdclient-0.2.0b0.dist-info}/METADATA +60 -39
  36. imdclient-0.2.0b0.dist-info/RECORD +53 -0
  37. {imdclient-0.1.3.dist-info → imdclient-0.2.0b0.dist-info}/WHEEL +1 -1
  38. {imdclient-0.1.3.dist-info → imdclient-0.2.0b0.dist-info/licenses}/AUTHORS.md +4 -1
  39. {imdclient-0.1.3.dist-info → imdclient-0.2.0b0.dist-info/licenses}/LICENSE +3 -1
  40. imdclient/IMD.py +0 -130
  41. imdclient/backends.py +0 -352
  42. imdclient/results.py +0 -332
  43. imdclient/streamanalysis.py +0 -1056
  44. imdclient/streambase.py +0 -199
  45. imdclient/tests/test_imdreader.py +0 -658
  46. imdclient/tests/test_stream_analysis.py +0 -61
  47. imdclient-0.1.3.dist-info/RECORD +0 -42
  48. {imdclient-0.1.3.dist-info → imdclient-0.2.0b0.dist-info}/top_level.txt +0 -0
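The headline change in this release is the removal of the bundled copies of MDAnalysis analysis machinery (imdclient/IMD.py, imdclient/backends.py, imdclient/results.py, imdclient/streamanalysis.py, imdclient/streambase.py) together with their tests. Both modules reproduced in full below identify themselves as verbatim copies from MDAnalysis 2.8.0, so code that imported them from imdclient can in principle fall back to the upstream originals. A hedged migration sketch, assuming MDAnalysis >= 2.8.0 is installed (this is not an official imdclient recipe):

```python
# Migration sketch (assumption, not an official imdclient recipe):
# imdclient 0.1.3 bundled verbatim copies of MDAnalysis 2.8.0 modules;
# 0.2.0b0 removes them, so import the upstream originals instead.
try:
    # imdclient <= 0.1.3: bundled copies shipped with the package
    from imdclient.backends import BackendSerial
    from imdclient.results import Results, ResultsGroup
except ImportError:
    # imdclient >= 0.2.0b0: upstream MDAnalysis (>= 2.8.0) originals
    from MDAnalysis.analysis.backends import BackendSerial
    from MDAnalysis.analysis.results import Results, ResultsGroup
```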
imdclient/backends.py DELETED
@@ -1,352 +0,0 @@
- # Copy of backends from MDA 2.8.0
- """Analysis backends --- :mod:`MDAnalysis.analysis.backends`
- ============================================================
-
- .. versionadded:: 2.8.0
-
-
- The :mod:`backends` module provides :class:`BackendBase` base class to
- implement custom execution backends for
- :meth:`MDAnalysis.analysis.base.AnalysisBase.run` and its
- subclasses.
-
- .. SeeAlso:: :ref:`parallel-analysis`
-
- .. _backends:
-
- Backends
- --------
-
- Three built-in backend classes are provided:
-
- * *serial*: :class:`BackendSerial`, that is equivalent to using no
-   parallelization and is the default
-
- * *multiprocessing*: :class:`BackendMultiprocessing` that supports
-   parallelization via standard Python :mod:`multiprocessing` module
-   and uses default :mod:`pickle` serialization
-
- * *dask*: :class:`BackendDask`, that uses the same process-based
-   parallelization as :class:`BackendMultiprocessing`, but different
-   serialization algorithm via `dask <https://dask.org/>`_ (see `dask
-   serialization algorithms
-   <https://distributed.dask.org/en/latest/serialization.html>`_ for details)
-
- Classes
- -------
-
- """
- import warnings
- from typing import Callable
- import importlib.util
-
-
- def is_installed(modulename: str):
-     """Checks if module is installed
-
-     Parameters
-     ----------
-     modulename : str
-         name of the module to be tested
-
-
-     .. versionadded:: 2.8.0
-     """
-     return importlib.util.find_spec(modulename) is not None
-
-
- class BackendBase:
-     """Base class for backend implementation.
-
-     Initializes an instance and performs checks for its validity, such as
-     ``n_workers`` and possibly other ones.
-
-     Parameters
-     ----------
-     n_workers : int
-         number of workers (usually, processes) over which the work is split
-
-     Examples
-     --------
-     .. code-block:: python
-
-         from MDAnalysis.analysis.backends import BackendBase
-
-         class ThreadsBackend(BackendBase):
-             def apply(self, func, computations):
-                 from multiprocessing.dummy import Pool
-
-                 with Pool(processes=self.n_workers) as pool:
-                     results = pool.map(func, computations)
-                 return results
-
-         import MDAnalysis as mda
-         from MDAnalysis.tests.datafiles import PSF, DCD
-         from MDAnalysis.analysis.rms import RMSD
-
-         u = mda.Universe(PSF, DCD)
-         ref = mda.Universe(PSF, DCD)
-
-         R = RMSD(u, ref)
-
-         n_workers = 2
-         backend = ThreadsBackend(n_workers=n_workers)
-         R.run(backend=backend, unsupported_backend=True)
-
-     .. warning::
-         Using `ThreadsBackend` above will lead to erroneous results, since it
-         is an educational example. Do not use it for real analysis.
-
-
-     .. versionadded:: 2.8.0
-
-     """
-
-     def __init__(self, n_workers: int):
-         self.n_workers = n_workers
-         self._validate()
-
-     def _get_checks(self):
-         """Get dictionary with ``condition: error_message`` pairs that ensure the
-         validity of the backend instance
-
-         Returns
-         -------
-         dict
-             dictionary with ``condition: error_message`` pairs that will get
-             checked during ``_validate()`` run
-         """
-         return {
-             isinstance(self.n_workers, int)
-             and self.n_workers
-             > 0: f"n_workers should be positive integer, got {self.n_workers=}",
-         }
-
-     def _get_warnings(self):
-         """Get dictionary with ``condition: warning_message`` pairs that ensure
-         the good usage of the backend instance
-
-         Returns
-         -------
-         dict
-             dictionary with ``condition: warning_message`` pairs that will get
-             checked during ``_validate()`` run
-         """
-         return dict()
-
-     def _validate(self):
-         """Check correctness (e.g. ``dask`` is installed if using ``backend='dask'``)
-         and good usage (e.g. ``n_workers=1`` if backend is serial) of the backend
-
-         Raises
-         ------
-         ValueError
-             if one of the conditions in :meth:`_get_checks` is ``False``
-         """
-         for check, msg in self._get_checks().items():
-             if not check:
-                 raise ValueError(msg)
-         for check, msg in self._get_warnings().items():
-             if not check:
-                 warnings.warn(msg)
-
-     def apply(self, func: Callable, computations: list) -> list:
-         """map function `func` to all tasks in the `computations` list
-
-         Main method that will get called when using an instance of
-         ``BackendBase``. It is equivalent to running ``[func(item) for item in
-         computations]`` while using the parallel backend capabilities.
-
-         Parameters
-         ----------
-         func : Callable
-             function to be called on each of the tasks in computations list
-         computations : list
-             computation tasks to apply function to
-
-         Returns
-         -------
-         list
-             list of results of the function
-
-         """
-         raise NotImplementedError
-
-
- class BackendSerial(BackendBase):
-     """A built-in backend that does serial execution of the function, without any
-     parallelization.
-
-     Parameters
-     ----------
-     n_workers : int
-         Is ignored in this class, and if ``n_workers`` > 1, a warning will be
-         given.
-
-
-     .. versionadded:: 2.8.0
-     """
-
-     def _get_warnings(self):
-         """Get dictionary with ``condition: warning_message`` pairs that ensure
-         the good usage of the backend instance. Here, it checks if the number
-         of workers is not 1, otherwise gives warning.
-
-         Returns
-         -------
-         dict
-             dictionary with ``condition: warning_message`` pairs that will get
-             checked during ``_validate()`` run
-         """
-         return {
-             self.n_workers
-             == 1: "n_workers is ignored when executing with backend='serial'"
-         }
-
-     def apply(self, func: Callable, computations: list) -> list:
-         """
-         Serially applies `func` to each task object in ``computations``.
-
-         Parameters
-         ----------
-         func : Callable
-             function to be called on each of the tasks in computations list
-         computations : list
-             computation tasks to apply function to
-
-         Returns
-         -------
-         list
-             list of results of the function
-         """
-         return [func(task) for task in computations]
-
-
- class BackendMultiprocessing(BackendBase):
-     """A built-in backend that executes a given function using the
-     :meth:`multiprocessing.Pool.map <multiprocessing.pool.Pool.map>` method.
-
-     Parameters
-     ----------
-     n_workers : int
-         number of processes in :class:`multiprocessing.Pool
-         <multiprocessing.pool.Pool>` to distribute the workload
-         between. Must be a positive integer.
-
-     Examples
-     --------
-
-     .. code-block:: python
-
-         from MDAnalysis.analysis.backends import BackendMultiprocessing
-         import multiprocessing as mp
-
-         backend_obj = BackendMultiprocessing(n_workers=mp.cpu_count())
-
-
-     .. versionadded:: 2.8.0
-
-     """
-
-     def apply(self, func: Callable, computations: list) -> list:
-         """Applies `func` to each object in ``computations`` using `multiprocessing`'s `Pool.map`.
-
-         Parameters
-         ----------
-         func : Callable
-             function to be called on each of the tasks in computations list
-         computations : list
-             computation tasks to apply function to
-
-         Returns
-         -------
-         list
-             list of results of the function
-         """
-         from multiprocessing import Pool
-
-         with Pool(processes=self.n_workers) as pool:
-             results = pool.map(func, computations)
-         return results
-
-
- class BackendDask(BackendBase):
-     """A built-in backend that executes a given function with *dask*.
-
-     Execution is performed with the :func:`dask.compute` function of
-     :class:`dask.delayed.Delayed` object (created with
-     :func:`dask.delayed.delayed`) with ``scheduler='processes'`` and
-     ``chunksize=1`` (this ensures uniform distribution of tasks among
-     processes). Requires the `dask package <https://docs.dask.org/en/stable/>`_
-     to be `installed <https://docs.dask.org/en/stable/install.html>`_.
-
-     Parameters
-     ----------
-     n_workers : int
-         number of processes to distribute the workload
-         between. Must be a positive integer. Workers are actually
-         :class:`multiprocessing.pool.Pool` processes, but they use a different and
-         more flexible `serialization protocol
-         <https://docs.dask.org/en/stable/phases-of-computation.html#graph-serialization>`_.
-
-     Examples
-     --------
-
-     .. code-block:: python
-
-         from MDAnalysis.analysis.backends import BackendDask
-         import multiprocessing as mp
-
-         backend_obj = BackendDask(n_workers=mp.cpu_count())
-
-
-     .. versionadded:: 2.8.0
-
-     """
-
-     def apply(self, func: Callable, computations: list) -> list:
-         """Applies `func` to each object in ``computations``.
-
-         Parameters
-         ----------
-         func : Callable
-             function to be called on each of the tasks in computations list
-         computations : list
-             computation tasks to apply function to
-
-         Returns
-         -------
-         list
-             list of results of the function
-         """
-         from dask.delayed import delayed
-         import dask
-
-         computations = [delayed(func)(task) for task in computations]
-         results = dask.compute(
-             computations,
-             scheduler="processes",
-             chunksize=1,
-             num_workers=self.n_workers,
-         )[0]
-         return results
-
-     def _get_checks(self):
-         """Get dictionary with ``condition: error_message`` pairs that ensure the
-         validity of the backend instance. Here checks if ``dask`` module is
-         installed in the environment.
-
-         Returns
-         -------
-         dict
-             dictionary with ``condition: error_message`` pairs that will get
-             checked during ``_validate()`` run
-         """
-         base_checks = super()._get_checks()
-         checks = {
-             is_installed("dask"): (
-                 "module 'dask' is missing. Please install 'dask': "
-                 "https://docs.dask.org/en/stable/install.html"
-             )
-         }
-         return base_checks | checks
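The deleted module above defines a small contract: a backend subclasses BackendBase, implements apply(func, computations) (equivalent to [func(item) for item in computations]), and may extend the condition: error_message dictionaries returned by _get_checks()/_get_warnings(), which _validate() evaluates at construction time. A minimal sketch of that contract against the upstream class; the class name CappedSerialBackend and its extra check are invented for illustration:

```python
# Minimal sketch of the BackendBase contract, using the upstream class from
# MDAnalysis >= 2.8.0 (the bundled copy above is what this release deletes).
from MDAnalysis.analysis.backends import BackendBase


class CappedSerialBackend(BackendBase):
    """Hypothetical backend: serial execution plus one extra validity check."""

    def _get_checks(self):
        # Checks compose with the base class: each entry maps an
        # already-evaluated boolean condition to the error message that
        # _validate() raises (as ValueError) when the condition is False.
        return {
            **super()._get_checks(),
            self.n_workers <= 8: f"this demo caps n_workers at 8, got {self.n_workers}",
        }

    def apply(self, func, computations):
        # The whole contract: map func over the task list and return a list.
        return [func(task) for task in computations]


backend = CappedSerialBackend(n_workers=2)  # _validate() runs here
assert backend.apply(lambda x: x * x, [1, 2, 3]) == [1, 4, 9]
```

Note that because the check keys are evaluated booleans, two passing checks collapse into a single dict entry; the upstream BackendDask._get_checks merges its dictionaries the same way.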
imdclient/results.py DELETED
@@ -1,332 +0,0 @@
- # Copy of MDAnalysis.analysis.results from 2.8.0
-
- """Analysis results and their aggregation --- :mod:`MDAnalysis.analysis.results`
- ================================================================================
-
- Module introduces two classes, :class:`Results` and :class:`ResultsGroup`,
- used for storing and aggregating data in
- :meth:`MDAnalysis.analysis.base.AnalysisBase.run()`, respectively.
-
-
- Classes
- -------
-
- The :class:`Results` class is an extension of a built-in dictionary
- type, that holds all assigned attributes in :attr:`self.data` and
- allows for access either via dict-like syntax, or via class-like syntax:
-
- .. code-block:: python
-
-     from MDAnalysis.analysis.results import Results
-     r = Results()
-     r.array = [1, 2, 3, 4]
-     assert r['array'] == r.array == [1, 2, 3, 4]
-
-
- The :class:`ResultsGroup` can merge multiple :class:`Results` objects.
- It is mainly used by :class:`MDAnalysis.analysis.base.AnalysisBase` class,
- that uses :meth:`ResultsGroup.merge()` method to aggregate results from
- multiple workers, initialized during a parallel run:
-
- .. code-block:: python
-
-     from MDAnalysis.analysis.results import Results, ResultsGroup
-     import numpy as np
-
-     r1, r2 = Results(), Results()
-     r1.masses = [1, 2, 3, 4, 5]
-     r2.masses = [0, 0, 0, 0]
-     r1.vectors = np.arange(10).reshape(5, 2)
-     r2.vectors = np.arange(8).reshape(4, 2)
-
-     group = ResultsGroup(
-         lookup = {
-             'masses': ResultsGroup.flatten_sequence,
-             'vectors': ResultsGroup.ndarray_vstack
-         }
-     )
-
-     r = group.merge([r1, r2])
-     assert r.masses == list((*r1.masses, *r2.masses))
-     assert (r.vectors == np.vstack([r1.vectors, r2.vectors])).all()
- """
-
- from collections import UserDict
- import numpy as np
- from typing import Callable, Sequence
-
-
- class Results(UserDict):
-     r"""Container object for storing results.
-
-     :class:`Results` are dictionaries that provide two ways by which values
-     can be accessed: by dictionary key ``results["value_key"]`` or by object
-     attribute, ``results.value_key``. :class:`Results` stores all results
-     obtained from an analysis after calling :meth:`~AnalysisBase.run()`.
-
-     The implementation is similar to the :class:`sklearn.utils.Bunch`
-     class in `scikit-learn`_.
-
-     .. _`scikit-learn`: https://scikit-learn.org/
-     .. _`sklearn.utils.Bunch`: https://scikit-learn.org/stable/modules/generated/sklearn.utils.Bunch.html
-
-     Raises
-     ------
-     AttributeError
-         If an assigned attribute has the same name as a default attribute.
-
-     ValueError
-         If a key is not of type ``str`` and therefore is not able to be
-         accessed by attribute.
-
-     Examples
-     --------
-     >>> from MDAnalysis.analysis.base import Results
-     >>> results = Results(a=1, b=2)
-     >>> results['b']
-     2
-     >>> results.b
-     2
-     >>> results.a = 3
-     >>> results['a']
-     3
-     >>> results.c = [1, 2, 3, 4]
-     >>> results['c']
-     [1, 2, 3, 4]
-
-
-     .. versionadded:: 2.0.0
-
-     .. versionchanged:: 2.8.0
-         Moved :class:`Results` to :mod:`MDAnalysis.analysis.results`
-     """
-
-     def _validate_key(self, key):
-         if key in dir(self):
-             raise AttributeError(
-                 f"'{key}' is a protected dictionary attribute"
-             )
-         elif isinstance(key, str) and not key.isidentifier():
-             raise ValueError(f"'{key}' is not a valid attribute")
-
-     def __init__(self, *args, **kwargs):
-         kwargs = dict(*args, **kwargs)
-         if "data" in kwargs.keys():
-             raise AttributeError(f"'data' is a protected dictionary attribute")
-         self.__dict__["data"] = {}
-         self.update(kwargs)
-
-     def __setitem__(self, key, item):
-         self._validate_key(key)
-         super().__setitem__(key, item)
-
-     def __setattr__(self, attr, val):
-         if attr == "data":
-             super().__setattr__(attr, val)
-         else:
-             self.__setitem__(attr, val)
-
-     def __getattr__(self, attr):
-         try:
-             return self[attr]
-         except KeyError as err:
-             raise AttributeError(
-                 f"'Results' object has no attribute '{attr}'"
-             ) from err
-
-     def __delattr__(self, attr):
-         try:
-             del self[attr]
-         except KeyError as err:
-             raise AttributeError(
-                 f"'Results' object has no attribute '{attr}'"
-             ) from err
-
-     def __getstate__(self):
-         return self.data
-
-     def __setstate__(self, state):
-         self.data = state
-
-
- class ResultsGroup:
-     """
-     Management and aggregation of results stored in :class:`Results` instances.
-
-     A :class:`ResultsGroup` is an optional description for :class:`Results` "dictionaries"
-     that are used in analysis classes based on :class:`AnalysisBase`. For each *key* in a
-     :class:`Results` it describes how multiple pieces of the data held under the key are
-     to be aggregated. This approach is necessary when parts of a trajectory are analyzed
-     independently (e.g., in parallel) and then need to be merged (with :meth:`merge`) to
-     obtain a complete data set.
-
-     Parameters
-     ----------
-     lookup : dict[str, Callable], optional
-         aggregation functions lookup dict, by default None
-
-     Examples
-     --------
-
-     .. code-block:: python
-
-         from MDAnalysis.analysis.results import ResultsGroup, Results
-         group = ResultsGroup(lookup={'mass': ResultsGroup.float_mean})
-         obj1 = Results(mass=1)
-         obj2 = Results(mass=3)
-         assert {'mass': 2.0} == group.merge([obj1, obj2])
-
-
-     .. code-block:: python
-
-         # you can also set `None` for those attributes that you want to skip
-         lookup = {'mass': ResultsGroup.float_mean, 'trajectory': None}
-         group = ResultsGroup(lookup)
-         objects = [Results(mass=1, skip=None), Results(mass=3, skip=object)]
-         assert group.merge(objects, require_all_aggregators=False) == {'mass': 2.0}
-
-     .. versionadded:: 2.8.0
-     """
-
-     def __init__(self, lookup: dict[str, Callable] = None):
-         self._lookup = lookup
-
-     def merge(
-         self, objects: Sequence[Results], require_all_aggregators: bool = True
-     ) -> Results:
-         """Merge multiple Results into a single Results instance.
-
-         Merge multiple :class:`Results` instances into a single one, using the
-         `lookup` dictionary to determine the appropriate aggregator functions for
-         each named results attribute. If the resulting object only contains a single
-         element, it just returns it without using any aggregators.
-
-         Parameters
-         ----------
-         objects : Sequence[Results]
-             Multiple :class:`Results` instances with the same data attributes.
-         require_all_aggregators : bool, optional
-             if True, raise an exception when no aggregation function for a
-             particular argument is found. Allows skipping aggregation for
-             parameters that aren't needed in the final object --
-             see :class:`ResultsGroup`.
-
-         Returns
-         -------
-         Results
-             merged :class:`Results`
-
-         Raises
-         ------
-         ValueError
-             if no aggregation function for a key is found and ``require_all_aggregators=True``
-         """
-         if len(objects) == 1:
-             merged_results = objects[0]
-             return merged_results
-
-         merged_results = Results()
-         for key in objects[0].keys():
-             agg_function = self._lookup.get(key, None)
-             if agg_function is not None:
-                 results_of_t = [obj[key] for obj in objects]
-                 merged_results[key] = agg_function(results_of_t)
-             elif require_all_aggregators:
-                 raise ValueError(f"No aggregation function for {key=}")
-         return merged_results
-
-     @staticmethod
-     def flatten_sequence(arrs: list[list]):
-         """Flatten a list of lists into a list
-
-         Parameters
-         ----------
-         arrs : list[list]
-             list of lists
-
-         Returns
-         -------
-         list
-             flattened list
-         """
-         return [item for sublist in arrs for item in sublist]
-
-     @staticmethod
-     def ndarray_sum(arrs: list[np.ndarray]):
-         """sums an ndarray along ``axis=0``
-
-         Parameters
-         ----------
-         arrs : list[np.ndarray]
-             list of input arrays. Must have the same shape.
-
-         Returns
-         -------
-         np.ndarray
-             sum of input arrays
-         """
-         return np.array(arrs).sum(axis=0)
-
-     @staticmethod
-     def ndarray_mean(arrs: list[np.ndarray]):
-         """calculates mean of input ndarrays along ``axis=0``
-
-         Parameters
-         ----------
-         arrs : list[np.ndarray]
-             list of input arrays. Must have the same shape.
-
-         Returns
-         -------
-         np.ndarray
-             mean of input arrays
-         """
-         return np.array(arrs).mean(axis=0)
-
-     @staticmethod
-     def float_mean(floats: list[float]):
-         """calculates mean of input float values
-
-         Parameters
-         ----------
-         floats : list[float]
-             list of float values
-
-         Returns
-         -------
-         float
-             mean value
-         """
-         return np.array(floats).mean()
-
-     @staticmethod
-     def ndarray_hstack(arrs: list[np.ndarray]):
-         """Performs horizontal stack of input arrays
-
-         Parameters
-         ----------
-         arrs : list[np.ndarray]
-             input numpy arrays
-
-         Returns
-         -------
-         np.ndarray
-             result of stacking
-         """
-         return np.hstack(arrs)
-
-     @staticmethod
-     def ndarray_vstack(arrs: list[np.ndarray]):
-         """Performs vertical stack of input arrays
-
-         Parameters
-         ----------
-         arrs : list[np.ndarray]
-             input numpy arrays
-
-         Returns
-         -------
-         np.ndarray
-             result of stacking
-         """
-         return np.vstack(arrs)
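Taken together, the two deleted classes above implement a map-then-merge pattern: each worker fills its own Results mapping, and ResultsGroup.merge() applies one aggregator per key. A hedged usage sketch against the upstream originals in MDAnalysis >= 2.8.0 (the key names and array shapes below are invented for illustration):

```python
# Usage sketch of the aggregation semantics documented above, using the
# upstream originals in MDAnalysis >= 2.8.0 (this release deletes the copy).
import numpy as np
from MDAnalysis.analysis.results import Results, ResultsGroup

# Each worker fills its own Results mapping; attribute and key access are
# interchangeable (r1.masses is r1["masses"]).
r1 = Results(masses=[1, 2, 3], vectors=np.arange(4).reshape(2, 2))
r2 = Results(masses=[4, 5], vectors=np.arange(6).reshape(3, 2))

# The lookup dict picks one aggregator per result key at merge time.
group = ResultsGroup(
    lookup={
        "masses": ResultsGroup.flatten_sequence,  # concatenate the lists
        "vectors": ResultsGroup.ndarray_vstack,   # stack arrays row-wise
    }
)
merged = group.merge([r1, r2])

assert merged.masses == [1, 2, 3, 4, 5]
assert merged.vectors.shape == (5, 2)  # (2 + 3) rows, 2 columns
```

merge() short-circuits when given a single Results object, so the aggregators only run when there is actually something to combine.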