junifer 0.0.7.dev105__py3-none-any.whl → 0.0.7.dev121__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
junifer/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE

- __version__ = version = '0.0.7.dev105'
- __version_tuple__ = version_tuple = (0, 0, 7, 'dev105')
+ __version__ = version = '0.0.7.dev121'
+ __version_tuple__ = version_tuple = (0, 0, 7, 'dev121')
junifer/api/decorators.py CHANGED
@@ -6,8 +6,13 @@
  # License: AGPL

  from ..data import DataDispatcher
- from ..pipeline import PipelineComponentRegistry
+ from ..pipeline import (
+     AssetDumperDispatcher,
+     AssetLoaderDispatcher,
+     PipelineComponentRegistry,
+ )
  from ..typing import (
+     DataDumpAssetLike,
      DataGrabberLike,
      DataRegistryLike,
      MarkerLike,
@@ -17,6 +22,7 @@ from ..typing import (


  __all__ = [
+     "register_data_dump_asset",
      "register_data_registry",
      "register_datagrabber",
      "register_datareader",
@@ -184,3 +190,49 @@ def register_data_registry(name: str) -> DataRegistryLike:
          return klass

      return decorator
+
+
+ def register_data_dump_asset(
+     types: list[type], exts: list[str]
+ ) -> DataDumpAssetLike:
+     """Asset registration decorator.
+
+     Registers the data dump asset for ``types`` with ``exts``.
+
+     Parameters
+     ----------
+     types : list of class
+         The classes to dump.
+     exts : list of str
+         The extensions to load.
+
+     Returns
+     -------
+     class
+         The unmodified input class.
+
+     """
+
+     def decorator(klass: DataDumpAssetLike) -> DataDumpAssetLike:
+         """Actual decorator.
+
+         Parameters
+         ----------
+         klass : class
+             The class of the data dump asset to register.
+
+         Returns
+         -------
+         class
+             The unmodified input class.
+
+         """
+         # Add asset dumper
+         for t in types:
+             AssetDumperDispatcher()[t] = klass
+         # Add asset loader
+         for e in exts:
+             AssetLoaderDispatcher()[e] = klass
+         return klass
+
+     return decorator
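
The decorator registers a single asset class with both dispatchers: each entry in ``types`` maps to the class in ``AssetDumperDispatcher`` and each entry in ``exts`` maps to it in ``AssetLoaderDispatcher``. A minimal sketch of registering a custom asset; the ``JSONDictAsset`` class and ``.json`` extension are hypothetical and do not ship with junifer:

```python
import json
from pathlib import Path

from junifer.api.decorators import register_data_dump_asset
from junifer.pipeline import BaseDataDumpAsset


@register_data_dump_asset(types=[dict], exts=[".json"])
class JSONDictAsset(BaseDataDumpAsset):
    """Hypothetical asset that dumps / loads plain dicts as JSON."""

    def dump(self) -> None:
        # The subclass is responsible for appending its extension
        self.path_without_ext.with_suffix(".json").write_text(
            json.dumps(self.data)
        )

    @classmethod
    def load(cls, path: Path) -> dict:
        return json.loads(path.read_text())
```

After this, ``dict in AssetDumperDispatcher()`` and ``".json" in AssetLoaderDispatcher()`` both hold, mirroring the assertions in the test below.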
junifer/api/tests/test_decorators.py CHANGED
@@ -3,8 +3,18 @@
  # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from junifer.api.decorators import register_data_registry
+ import pickle
+
+ from junifer.api.decorators import (
+     register_data_dump_asset,
+     register_data_registry,
+ )
  from junifer.data import BasePipelineDataRegistry, DataDispatcher
+ from junifer.pipeline import (
+     AssetDumperDispatcher,
+     AssetLoaderDispatcher,
+     BaseDataDumpAsset,
+ )


  def test_register_data_registry() -> None:
@@ -30,3 +40,39 @@ def test_register_data_registry() -> None:
      assert "dumb" in DataDispatcher()
      _ = DataDispatcher().pop("dumb")
      assert "dumb" not in DataDispatcher()
+
+
+ def test_register_data_dump_asset() -> None:
+     """Test data dump asset registration."""
+
+     class Int(int): ...
+
+     class Float(float): ...
+
+     @register_data_dump_asset([Int, Float], [".int", ".float"])
+     class DumAsset(BaseDataDumpAsset):
+         def dump(self):
+             suffix = ""
+             if isinstance(self.data, Int):
+                 suffix = ".int"
+             else:
+                 suffix = ".float"
+             pickle.dump(self.data, self.path_without_ext.with_suffix(suffix))
+
+         @classmethod
+         def load(cls, path):
+             return pickle.load(path)
+
+     assert Int in AssetDumperDispatcher()
+     assert Float in AssetDumperDispatcher()
+     _ = AssetDumperDispatcher().pop(Int)
+     _ = AssetDumperDispatcher().pop(Float)
+     assert Int not in AssetDumperDispatcher()
+     assert Float not in AssetDumperDispatcher()
+
+     assert ".int" in AssetLoaderDispatcher()
+     assert ".float" in AssetLoaderDispatcher()
+     _ = AssetLoaderDispatcher().pop(".int")
+     _ = AssetLoaderDispatcher().pop(".float")
+     assert ".int" not in AssetLoaderDispatcher()
+     assert ".float" not in AssetLoaderDispatcher()
junifer/onthefly/read_transform.py CHANGED
@@ -5,6 +5,7 @@

  from typing import Optional

+ import numpy as np
  import pandas as pd

  from ..typing import StorageLike
@@ -19,6 +20,7 @@ def read_transform(
      transform: str,
      feature_name: Optional[str] = None,
      feature_md5: Optional[str] = None,
+     nan_policy: Optional[str] = "bypass",
      transform_args: Optional[tuple] = None,
      transform_kw_args: Optional[dict] = None,
  ) -> pd.DataFrame:
@@ -35,6 +37,16 @@ def read_transform(
          Name of the feature to read (default None).
      feature_md5 : str, optional
          MD5 hash of the feature to read (default None).
+     nan_policy : str, optional
+         The policy to handle NaN values (default "bypass").
+         Options are:
+
+         * "bypass": Do nothing and pass NaN values to the transform function.
+         * "drop_element": Drop (skip) elements with NaN values.
+         * "drop_rows": Drop (skip) rows with NaN values.
+         * "drop_columns": Drop (skip) columns with NaN values.
+         * "drop_symmetric": Drop (skip) symmetric pairs with NaN values.
+
      transform_args : tuple, optional
          The positional arguments for the callable of ``transform``
          (default None).
@@ -47,6 +59,18 @@ def read_transform(
      pandas.DataFrame
          The transformed feature as a dataframe.

+     Raises
+     ------
+     ValueError
+         If ``nan_policy`` is invalid or
+         if *package* is invalid.
+     RuntimeError
+         If *package* is ``bctpy`` and stored data kind is not ``matrix``.
+     ImportError
+         If ``bctpy`` cannot be imported.
+     AttributeError
+         If *function* to be invoked is invalid.
+
      Notes
      -----
      This function has been only tested for:
@@ -63,6 +87,18 @@ def read_transform(
      transform_args = transform_args or ()
      transform_kw_args = transform_kw_args or {}

+     if nan_policy not in [
+         "bypass",
+         "drop_element",
+         "drop_rows",
+         "drop_columns",
+         "drop_symmetric",
+     ]:
+         raise_error(
+             f"Unknown nan_policy: {nan_policy}",
+             klass=ValueError,
+         )
+
      # Read storage
      stored_data = storage.read(
          feature_name=feature_name, feature_md5=feature_md5
@@ -106,22 +142,52 @@ def read_transform(
      except AttributeError as err:
          raise_error(msg=str(err), klass=AttributeError)

-     # Apply function and store subject-wise
+     # Apply function and store element-wise
      output_list = []
+     element_list = []
      logger.debug(
          f"Computing '{package}.{func_str}' for feature "
          f"{feature_name or feature_md5} ..."
      )
-     for subject in range(stored_data["data"].shape[2]):
+     for i_element, element in enumerate(stored_data["element"]):
+         t_data = stored_data["data"][:, :, i_element]
+         has_nan = np.isnan(np.min(t_data))
+         if nan_policy == "drop_element" and has_nan:
+             logger.debug(
+                 f"Skipping element {element} due to NaN values ..."
+             )
+             continue
+         elif nan_policy == "drop_rows" and has_nan:
+             logger.debug(
+                 f"Skipping rows with NaN values in element {element} ..."
+             )
+             t_data = t_data[~np.isnan(t_data).any(axis=1)]
+         elif nan_policy == "drop_columns" and has_nan:
+             logger.debug(
+                 f"Skipping columns with NaN values in element {element} "
+                 "..."
+             )
+             t_data = t_data[:, ~np.isnan(t_data).any(axis=0)]
+         elif nan_policy == "drop_symmetric":
+             logger.debug(
+                 f"Skipping pairs of rows/columns with NaN values in "
+                 f"element {element}..."
+             )
+             good_rows = ~np.isnan(t_data).any(axis=1)
+             good_columns = ~np.isnan(t_data).any(axis=0)
+             good_idx = np.logical_and(good_rows, good_columns)
+             t_data = t_data[good_idx][:, good_idx]
+
          output = func(
-             stored_data["data"][:, :, subject],
+             t_data,
              *transform_args,
              **transform_kw_args,
          )
          output_list.append(output)
+         element_list.append(element)

      # Create dataframe for index
-     idx_df = pd.DataFrame(data=stored_data["element"])
+     idx_df = pd.DataFrame(data=element_list)
      # Create multiindex from dataframe
      logger.debug(
          "Generating pandas.MultiIndex for feature "
junifer/onthefly/tests/test_read_transform.py CHANGED
@@ -64,6 +64,36 @@ def matrix_storage(tmp_path: Path) -> HDF5FeatureStorage:
      return storage


+ @pytest.fixture
+ def matrix_storage_with_nan(tmp_path: Path) -> HDF5FeatureStorage:
+     """Return a HDF5FeatureStorage with matrix data containing NaN values.
+
+     Parameters
+     ----------
+     tmp_path : pathlib.Path
+         The path to the test directory.
+
+     """
+     storage = HDF5FeatureStorage(tmp_path / "matrix_store_nan.hdf5")
+     data = np.arange(36).reshape(3, 3, 4).astype(float)
+     data[1, 1, 2] = np.nan
+     data[1, 2, 2] = np.nan
+     for i in range(4):
+         storage.store(
+             kind="matrix",
+             meta={
+                 "element": {"subject": f"test{i + 1}"},
+                 "dependencies": [],
+                 "marker": {"name": "matrix"},
+                 "type": "BOLD",
+             },
+             data=data[:, :, i],
+             col_names=["f1", "f2", "f3"],
+             row_names=["g1", "g2", "g3"],
+         )
+     return storage
+
+
  def test_incorrect_package(matrix_storage: HDF5FeatureStorage) -> None:
      """Test error check for incorrect package name.

@@ -176,3 +206,57 @@ def test_bctpy_function(
      )
      assert "Computing" in caplog.text
      assert "Generating" in caplog.text
+
+
+ @pytest.mark.parametrize(
+     "nan_policy, error_msg",
+     [
+         ("drop_element", None),
+         ("drop_rows", "square"),
+         ("drop_columns", "square"),
+         ("drop_symmetric", None),
+         ("bypass", "NaNs"),
+         ("wrong", "Unknown"),
+     ],
+ )
+ def test_bctpy_nans(
+     matrix_storage_with_nan: HDF5FeatureStorage,
+     caplog: pytest.LogCaptureFixture,
+     nan_policy: str,
+     error_msg: str,
+ ) -> None:
+     """Test NaN policy handling with a bctpy function.
+
+     Parameters
+     ----------
+     matrix_storage_with_nan : HDF5FeatureStorage
+         The HDF5FeatureStorage with matrix data, as fixture.
+     caplog : pytest.LogCaptureFixture
+         The pytest.LogCaptureFixture object.
+     nan_policy : str
+         The parametrized NaN policy to test.
+     error_msg : str
+         The expected error message snippet. If None, no error should be
+         raised.
+
+     """
+     # Skip test if import fails
+     pytest.importorskip("bct")
+
+     with caplog.at_level(logging.DEBUG):
+         if error_msg is None:
+             read_transform(
+                 storage=matrix_storage_with_nan,  # type: ignore
+                 feature_name="BOLD_matrix",
+                 transform="bctpy_eigenvector_centrality_und",
+                 nan_policy=nan_policy,
+             )
+             assert "Computing" in caplog.text
+             assert "Generating" in caplog.text
+         else:
+             with pytest.raises(ValueError, match=error_msg):
+                 read_transform(
+                     storage=matrix_storage_with_nan,  # type: ignore
+                     feature_name="BOLD_matrix",
+                     transform="bctpy_eigenvector_centrality_und",
+                     nan_policy=nan_policy,
+                 )
junifer/pipeline/__init__.pyi CHANGED
@@ -1,4 +1,8 @@
  __all__ = [
+     "AssetDumperDispatcher",
+     "AssetLoaderDispatcher",
+     "BaseDataDumpAsset",
+     "DataObjectDumper",
      "PipelineComponentRegistry",
      "PipelineStepMixin",
      "UpdateMetaMixin",
@@ -6,6 +10,12 @@ __all__ = [
      "MarkerCollection",
  ]

+ from ._data_object_dumper import (
+     AssetDumperDispatcher,
+     AssetLoaderDispatcher,
+     BaseDataDumpAsset,
+     DataObjectDumper,
+ )
  from .pipeline_component_registry import PipelineComponentRegistry
  from .pipeline_step_mixin import PipelineStepMixin
  from .update_meta_mixin import UpdateMetaMixin
junifer/pipeline/_data_object_dumper.py ADDED
@@ -0,0 +1,347 @@
+ """Provide pipeline data object dumper and data dump asset classes."""
+
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+ # License: AGPL
+
+ from abc import ABC, abstractmethod
+ from collections.abc import Iterator, MutableMapping
+ from copy import deepcopy
+ from pathlib import Path
+ from typing import Any
+
+ import nibabel
+ import pandas
+
+ from ..utils import raise_error, yaml
+
+
+ __all__ = [
+     "AssetDumperDispatcher",
+     "AssetLoaderDispatcher",
+     "BaseDataDumpAsset",
+     "DataObjectDumper",
+ ]
+
+
+ class BaseDataDumpAsset(ABC):
+     """Abstract base class for a data dump asset.
+
+     Parameters
+     ----------
+     data : Any
+         Data to save.
+     path_without_ext : pathlib.Path
+         Path to the asset without extension.
+         The subclass should add the extension when saving.
+
+     """
+
+     def __init__(self, data: Any, path_without_ext: Path) -> None:
+         """Initialize the class."""
+         self.data = data
+         self.path_without_ext = path_without_ext
+
+     @abstractmethod
+     def dump(self) -> None:
+         """Dump asset."""
+         raise_error(
+             msg="Concrete classes need to implement dump().",
+             klass=NotImplementedError,
+         )
+
+     @classmethod
+     @abstractmethod
+     def load(cls: type["BaseDataDumpAsset"], path: Path) -> Any:
+         """Load asset from path."""
+         raise_error(
+             msg="Concrete classes need to implement load().",
+             klass=NotImplementedError,
+         )
+
+
+ class Nifti1ImageAsset(BaseDataDumpAsset):
+     """Class for ``nibabel.Nifti1Image`` dumper."""
+
+     def dump(self) -> None:
+         nibabel.save(self.data, self.path_without_ext.with_suffix(".nii.gz"))
+
+     @classmethod
+     def load(cls: "Nifti1ImageAsset", path: Path) -> nibabel.Nifti1Image:
+         return nibabel.load(path)
+
+
+ class PandasDataFrameAsset(BaseDataDumpAsset):
+     """Class for ``pandas.DataFrame`` dumper."""
+
+     def dump(self) -> None:
+         self.data.to_csv(self.path_without_ext.with_suffix(".csv"))
+
+     @classmethod
+     def load(cls: "PandasDataFrameAsset", path: Path) -> pandas.DataFrame:
+         return pandas.read_csv(path, index_col=0)
+
+
+ class AssetDumperDispatcher(MutableMapping):
+     """Class for helping dynamic asset dumper dispatch."""
+
+     _instance = None
+
+     def __new__(cls):
+         # Make class singleton
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             # Set dumpers
+             cls._dumpers: dict[type, type[BaseDataDumpAsset]] = {}
+             cls._builtin: dict[type, type[BaseDataDumpAsset]] = {}
+             cls._external: dict[type, type[BaseDataDumpAsset]] = {}
+             cls._builtin.update(
+                 {
+                     nibabel.Nifti1Image: Nifti1ImageAsset,
+                     pandas.DataFrame: PandasDataFrameAsset,
+                 }
+             )
+             cls._dumpers.update(cls._builtin)
+         return cls._instance
+
+     def __getitem__(self, key: type) -> type[BaseDataDumpAsset]:
+         return self._dumpers[key]
+
+     def __iter__(self) -> Iterator[type]:
+         return iter(self._dumpers)
+
+     def __len__(self) -> int:
+         return len(self._dumpers)
+
+     def __delitem__(self, key: type) -> None:
+         # Internal check
+         if key in self._builtin:
+             raise_error(f"Cannot delete in-built key: {key}")
+         # Non-existing key
+         if key not in self._external:
+             raise_error(klass=KeyError, msg=str(key))
+         # Update external
+         _ = self._external.pop(key)
+         # Update global
+         _ = self._dumpers.pop(key)
+
+     def __setitem__(self, key: type, value: type[BaseDataDumpAsset]) -> None:
+         # Internal check
+         if key in self._builtin:
+             raise_error(f"Cannot set value for in-built key: {key}")
+         # Value type check
+         if not issubclass(value, BaseDataDumpAsset):
+             raise_error(f"Invalid value type: {type(value)}")
+         # Update external
+         self._external[key] = value
+         # Update global
+         self._dumpers[key] = value
+
+     def popitem(self):
+         """Not implemented."""
+         pass
+
+     def clear(self):
+         """Not implemented."""
+         pass
+
+     def setdefault(self, key: type, value=None):
+         """Not implemented."""
+         pass
+
+
+ class AssetLoaderDispatcher(MutableMapping):
+     """Class for helping dynamic asset loader dispatch."""
+
+     _instance = None
+
+     def __new__(cls):
+         # Make class singleton
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+             # Set loaders
+             cls._loaders: dict[str, type[BaseDataDumpAsset]] = {}
+             cls._builtin: dict[str, type[BaseDataDumpAsset]] = {}
+             cls._external: dict[str, type[BaseDataDumpAsset]] = {}
+             cls._builtin.update(
+                 {
+                     ".nii.gz": Nifti1ImageAsset,
+                     ".nii": Nifti1ImageAsset,
+                     ".csv": PandasDataFrameAsset,
+                 }
+             )
+             cls._loaders.update(cls._builtin)
+         return cls._instance
+
+     def __getitem__(self, key: str) -> type[BaseDataDumpAsset]:
+         return self._loaders[key]
+
+     def __iter__(self) -> Iterator[str]:
+         return iter(self._loaders)
+
+     def __len__(self) -> int:
+         return len(self._loaders)
+
+     def __delitem__(self, key: str) -> None:
+         # Internal check
+         if key in self._builtin:
+             raise_error(f"Cannot delete in-built key: {key}")
+         # Non-existing key
+         if key not in self._external:
+             raise_error(klass=KeyError, msg=key)
+         # Update external
+         _ = self._external.pop(key)
+         # Update global
+         _ = self._loaders.pop(key)
+
+     def __setitem__(self, key: str, value: type[BaseDataDumpAsset]) -> None:
+         # Internal check
+         if key in self._builtin:
+             raise_error(f"Cannot set value for in-built key: {key}")
+         # Value type check
+         if not issubclass(value, BaseDataDumpAsset):
+             raise_error(f"Invalid value type: {type(value)}")
+         # Update external
+         self._external[key] = value
+         # Update global
+         self._loaders[key] = value
+
+     def popitem(self):
+         """Not implemented."""
+         pass
+
+     def clear(self):
+         """Not implemented."""
+         pass
+
+     def setdefault(self, key: str, value=None):
+         """Not implemented."""
+         pass
+
+
+ class DataObjectDumper:
+     """Class for pipeline data object dumping."""
+
+     _instance = None
+
+     def __new__(cls):
+         """Overridden to make the class singleton."""
+         # Make class singleton
+         if cls._instance is None:
+             cls._instance = super().__new__(cls)
+         return cls._instance
+
+     def dump(self, data: dict, path: Path, step: str) -> None:
+         """Dump data object at path.
+
+         Parameters
+         ----------
+         data : dict
+             The data object state to dump.
+         path : pathlib.Path
+             The path to dump the data object.
+         step : str
+             The step name. Also sets the dump directory.
+
+         """
+         # Make a deep copy of data
+         data_copy = deepcopy(data)
+         # Initialize list for storing assets to save
+         assets = []
+
+         dump_file_root = path / step
+
+         for k, v in data_copy.items():
+             # Conditional for Warp type; kept separate for low cognitive load
+             if isinstance(v, list):
+                 for idx, _ in enumerate(v):
+                     data_copy[k][idx]["path"] = str(data_copy[k][idx]["path"])
+                 continue
+
+             # Transform Path to str
+             data_copy[k]["path"] = str(data_copy[k]["path"])
+             # Pop out first level assets; some data types might not have
+             if "data" in v:
+                 dumper = AssetDumperDispatcher()[type(v["data"])]
+                 assets.append(
+                     dumper(
+                         data=v.pop("data"),
+                         path_without_ext=dump_file_root / k,
+                     )
+                 )
+             for kk, vv in v.items():
+                 if isinstance(vv, dict) and kk != "meta":
+                     # Transform Path to str
+                     data_copy[k][kk]["path"] = str(data_copy[k][kk]["path"])
+                     # Pop out second level assets
+                     if "data" in vv:
+                         dumper = AssetDumperDispatcher()[type(vv["data"])]
+                         assets.append(
+                             dumper(
+                                 data=vv.pop("data"),
+                                 path_without_ext=dump_file_root / f"{k}_{kk}",
+                             )
+                         )
+
+         # Save yaml
+         dump_file_path = dump_file_root / "data.yaml"
+         dump_file_path.parent.mkdir(parents=True, exist_ok=True)
+         yaml.dump(data_copy, stream=dump_file_path)
+
+         # Save assets
+         for x in assets:
+             x.dump()
+
+     def load(self, path: Path) -> dict:
+         """Load data object from path.
+
+         Parameters
+         ----------
+         path : pathlib.Path
+             The path to the dumped data object.
+
+         Returns
+         -------
+         dict
+             The restored data object dump.
+
+         """
+         data = yaml.load(path)
+         # Load assets; stem => path mapping
+         assets = {
+             child.stem.split(".")[0]: child
+             for child in path.parent.iterdir()
+             if "".join(child.suffixes) in AssetLoaderDispatcher()
+         }
+
+         for k, v in data.items():
+             # Conditional for Warp type; kept separate for low cognitive load
+             if isinstance(v, list):
+                 for idx, _ in enumerate(v):
+                     data[k][idx]["path"] = Path(data[k][idx]["path"])
+                 continue
+
+             # Transform str to Path
+             data[k]["path"] = Path(data[k]["path"])
+             # Insert first level assets if matching asset is found
+             if k in assets:
+                 # Get path
+                 p = assets[k]
+                 data[k]["path"] = p
+                 # Get correct loader using extension
+                 loader = AssetLoaderDispatcher()["".join(p.suffixes)]
+                 data[k]["data"] = loader.load(p)
+             for kk, vv in v.items():
+                 if isinstance(vv, dict) and kk != "meta":
+                     # Transform str to Path
+                     data[k][kk]["path"] = Path(data[k][kk]["path"])
+                     # Insert second level assets
+                     key = f"{k}_{kk}"
+                     if key in assets:
+                         # Get path
+                         pp = assets[key]
+                         data[k][kk]["path"] = pp
+                         # Get correct loader using extension
+                         loader = AssetLoaderDispatcher()["".join(pp.suffixes)]
+                         data[k][kk]["data"] = loader.load(pp)
+
+         return data
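
``DataObjectDumper`` writes one ``data.yaml`` describing the object plus one sidecar file per popped ``"data"`` entry, and ``load()`` reassembles everything by matching sidecar stems and extensions. A minimal round-trip sketch, assuming an illustrative data object with a single ``BOLD``-like entry (keys and paths are placeholders):

```python
from pathlib import Path

import pandas as pd

from junifer.pipeline import DataObjectDumper

data = {
    "BOLD": {
        "path": Path("/tmp/bold.nii.gz"),  # placeholder path
        "data": pd.DataFrame({"f1": [1.0, 2.0], "f2": [3.0, 4.0]}),
        "meta": {},
    }
}
# Writes /tmp/dump/demo_step/data.yaml and /tmp/dump/demo_step/BOLD.csv
DataObjectDumper().dump(data=data, path=Path("/tmp/dump"), step="demo_step")
restored = DataObjectDumper().load(Path("/tmp/dump/demo_step/data.yaml"))
assert isinstance(restored["BOLD"]["data"], pd.DataFrame)
```

Since the dispatchers and the dumper are singletons, registrations made earlier via ``register_data_dump_asset`` are visible here as well.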
junifer/pipeline/marker_collection.py CHANGED
@@ -5,12 +5,13 @@
  # License: AGPL

  from collections import Counter
+ from pathlib import Path
  from typing import Optional

  from ..datareader import DefaultDataReader
- from ..pipeline import PipelineStepMixin, WorkDirManager
+ from ..pipeline import DataObjectDumper, PipelineStepMixin, WorkDirManager
  from ..typing import DataGrabberLike, MarkerLike, PreprocessorLike, StorageLike
- from ..utils import logger, raise_error
+ from ..utils import config, logger, raise_error


  __all__ = ["MarkerCollection"]
@@ -80,16 +81,53 @@ class MarkerCollection:

          # Fetch actual data using datareader
          data = self._datareader.fit_transform(input)
+         # Conditional data dump
+         if (
+             config.get("preprocessing.dump.location") is not None
+             and config.get("preprocessing.dump.granularity") == "full"
+         ):
+             DataObjectDumper().dump(
+                 data=data,
+                 path=Path(config.get("preprocessing.dump.location")),
+                 step=f"0_datareader_{self._datareader.__class__.__name__}",
+             )

          # Apply preprocessing steps
          if self._preprocessors is not None:
-             for preprocessor in self._preprocessors:
+             for idx, preprocessor in enumerate(self._preprocessors):
                  logger.info(
                      "Preprocessing data with "
                      f"{preprocessor.__class__.__name__}"
                  )
                  # Mutate data after every iteration
                  data = preprocessor.fit_transform(data)
+                 # Conditional data dump
+                 if (
+                     config.get("preprocessing.dump.location") is not None
+                     and config.get("preprocessing.dump.granularity") == "full"
+                 ):
+                     DataObjectDumper().dump(
+                         data=data,
+                         path=Path(config.get("preprocessing.dump.location")),
+                         step=(
+                             f"{idx + 1}_preprocessor_"
+                             f"{preprocessor.__class__.__name__}"
+                         ),
+                     )
+
+             # Conditional data dump
+             if (
+                 config.get("preprocessing.dump.location") is not None
+                 and config.get("preprocessing.dump.granularity") == "final"
+             ):
+                 DataObjectDumper().dump(
+                     data=data,
+                     path=Path(config.get("preprocessing.dump.location")),
+                     step=(
+                         f"final_preprocessor_"
+                         f"{self._preprocessors[-1].__class__.__name__}"
+                     ),
+                 )

          # Compute markers
          out = {}
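
Dumping is opt-in and controlled purely through the global config, with no change to the ``MarkerCollection`` API; a minimal sketch of switching it on (the location is a placeholder):

```python
from junifer.utils import config

# Where the per-step dump directories get created
config.set(key="preprocessing.dump.location", val="/tmp/junifer_dump")
# "full" dumps after the datareader and after every preprocessor;
# "final" dumps only after the last preprocessor
config.set(key="preprocessing.dump.granularity", val="full")
```

With these set, each dumped step lands in its own directory, e.g. ``0_datareader_DefaultDataReader/``, as exercised by the new tests below.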
junifer/pipeline/tests/test_data_object_dumper.py ADDED
@@ -0,0 +1,225 @@
+ """Provide tests for data object dumping."""
+
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+ # License: AGPL
+
+ import pickle
+ from pathlib import Path
+ from typing import Union
+
+ import nibabel
+ import pytest
+
+ from junifer.markers import FunctionalConnectivitySpheres
+ from junifer.pipeline import (
+     AssetDumperDispatcher,
+     AssetLoaderDispatcher,
+     BaseDataDumpAsset,
+     DataObjectDumper,
+     MarkerCollection,
+ )
+ from junifer.preprocess import fMRIPrepConfoundRemover
+ from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
+ from junifer.utils import config
+
+
+ @pytest.mark.parametrize(
+     "dispatcher, inbuilt_key, ext_key, val",
+     [
+         (
+             AssetDumperDispatcher,
+             nibabel.Nifti1Image,
+             nibabel.Nifti2Image,
+             dict,
+         ),
+         (AssetLoaderDispatcher, ".nii", ".tsv", dict),
+     ],
+ )
+ def test_dispatcher_addition_errors(
+     dispatcher: Union[AssetDumperDispatcher, AssetLoaderDispatcher],
+     inbuilt_key: Union[str, type],
+     ext_key: Union[str, type],
+     val: type,
+ ) -> None:
+     """Test asset dumper / loader addition errors.
+
+     Parameters
+     ----------
+     dispatcher : AssetDumperDispatcher or AssetLoaderDispatcher
+         The parametrized dispatcher.
+     inbuilt_key : str or type
+         The parametrized in-built key.
+     ext_key : str or type
+         The parametrized external key.
+     val : type
+         The parametrized value.
+
+     """
+     with pytest.raises(ValueError, match="Cannot set"):
+         dispatcher()[inbuilt_key] = val
+
+     with pytest.raises(ValueError, match="Invalid"):
+         dispatcher()[ext_key] = val
+
+
+ @pytest.mark.parametrize(
+     "dispatcher, inbuilt_key, ext_key",
+     [
+         (AssetDumperDispatcher, nibabel.Nifti1Image, nibabel.Nifti2Image),
+         (AssetLoaderDispatcher, ".nii", ".tsv"),
+     ],
+ )
+ def test_dispatcher_removal_errors(
+     dispatcher: Union[AssetDumperDispatcher, AssetLoaderDispatcher],
+     inbuilt_key: Union[str, type],
+     ext_key: Union[str, type],
+ ) -> None:
+     """Test asset dumper / loader removal errors.
+
+     Parameters
+     ----------
+     dispatcher : AssetDumperDispatcher or AssetLoaderDispatcher
+         The parametrized dispatcher.
+     inbuilt_key : str or type
+         The parametrized in-built key.
+     ext_key : str or type
+         The parametrized external key.
+
+     """
+     with pytest.raises(ValueError, match="Cannot delete"):
+         _ = dispatcher().pop(inbuilt_key)
+
+     with pytest.raises(KeyError, match=f"{ext_key}"):
+         del dispatcher()[ext_key]
+
+
+ def test_dispatcher() -> None:
+     """Test asset dumper / loader addition and removal."""
+
+     class Int(int): ...
+
+     class Float(float): ...
+
+     class DumAsset(BaseDataDumpAsset):
+         def dump(self):
+             suffix = ""
+             if isinstance(self.data, Int):
+                 suffix = ".int"
+             else:
+                 suffix = ".float"
+             pickle.dump(self.data, self.path_without_ext.with_suffix(suffix))
+
+         @classmethod
+         def load(cls, path):
+             return pickle.load(path)
+
+     AssetDumperDispatcher().update({nibabel.Nifti2Image: DumAsset})
+     assert nibabel.Nifti2Image in AssetDumperDispatcher()
+     _ = AssetDumperDispatcher().pop(nibabel.Nifti2Image)
+     assert nibabel.Nifti2Image not in AssetDumperDispatcher()
+
+     AssetLoaderDispatcher().update({".n+2": DumAsset})
+     assert ".n+2" in AssetLoaderDispatcher()
+     _ = AssetLoaderDispatcher().pop(".n+2")
+     assert ".n+2" not in AssetLoaderDispatcher()
+
+
+ @pytest.mark.parametrize(
+     "granularity, expected_dir_count",
+     [
+         ("full", 2),
+         ("final", 1),
+     ],
+ )
+ def test_data_object_dumper(
+     tmp_path: Path, granularity: str, expected_dir_count: int
+ ) -> None:
+     """Test data object dumper.
+
+     Parameters
+     ----------
+     tmp_path : pathlib.Path
+         The path to the test directory.
+     granularity : str
+         The parametrized granularity.
+     expected_dir_count : int
+         The parametrized expected directory count.
+
+     """
+     config.set(key="preprocessing.dump.location", val=tmp_path)
+     config.set(key="preprocessing.dump.granularity", val=granularity)
+
+     mc = MarkerCollection(
+         preprocessors=[
+             fMRIPrepConfoundRemover(
+                 strategy={
+                     "motion": "full",
+                     "wm_csf": "full",
+                 },
+                 detrend=True,
+                 standardize=True,
+                 low_pass=0.08,
+                 high_pass=0.01,
+             ),
+         ],
+         markers=[
+             FunctionalConnectivitySpheres(
+                 name="dmnbuckner_5mm_fc_spheres",
+                 coords="DMNBuckner",
+                 radius=5.0,
+                 conn_method="correlation",
+             ),
+         ],
+     )
+     dg = PartlyCloudyTestingDataGrabber()
+
+     with dg:
+         mc.fit(dg["sub-01"])
+
+     dirs = list(tmp_path.iterdir())
+     assert len(dirs) == expected_dir_count
+
+     dump_load = DataObjectDumper().load(dirs[-1] / "data.yaml")
+     assert "BOLD" in dump_load
+
+     config.delete("preprocessing.dump.location")
+     config.delete("preprocessing.dump.granularity")
+
+
+ def test_data_object_dumper_with_warp(tmp_path: Path) -> None:
+     """Test data object dumper with Warp data type.
+
+     Parameters
+     ----------
+     tmp_path : pathlib.Path
+         The path to the test directory.
+
+     """
+     DataObjectDumper().dump(
+         data={
+             "Warp": [
+                 {
+                     "path": (
+                         tmp_path / "from-MNI152NLin2009cAsym_to-T1w_"
+                         "mode-image_xfm.h5"
+                     ),
+                     "src": "MNI152NLin2009cAsym",
+                     "dst": "native",
+                     "warper": "ants",
+                 },
+                 {
+                     "path": (
+                         tmp_path / "from-T1w_to-MNI152NLin2009cAsym_"
+                         "mode-image_xfm.h5"
+                     ),
+                     "src": "native",
+                     "dst": "MNI152NLin2009cAsym",
+                     "warper": "ants",
+                 },
+             ],
+         },
+         path=tmp_path,
+         step="warp_test",
+     )
+     dump_load = DataObjectDumper().load(tmp_path / "warp_test" / "data.yaml")
+     assert "Warp" in dump_load
junifer/typing/__init__.pyi CHANGED
@@ -1,4 +1,5 @@
  __all__ = [
+     "DataDumpAssetLike",
      "DataGrabberLike",
      "DataRegistryLike",
      "PreprocessorLike",
@@ -16,6 +17,7 @@ __all__ = [
  ]

  from ._typing import (
+     DataDumpAssetLike,
      DataGrabberLike,
      DataRegistryLike,
      PreprocessorLike,
junifer/typing/_typing.py CHANGED
@@ -22,6 +22,7 @@ if TYPE_CHECKING:
  __all__ = [
      "ConditionalDependencies",
      "ConfigVal",
+     "DataDumpAssetLike",
      "DataGrabberLike",
      "DataGrabberPatterns",
      "DataRegistryLike",
@@ -37,6 +38,7 @@ __all__ = [
  ]


+ DataDumpAssetLike = type["BaseDataDumpAsset"]
  DataRegistryLike = type["BasePipelineDataRegistry"]
  DataGrabberLike = type["BaseDataGrabber"]
  PreprocessorLike = type["BasePreprocessor"]
junifer-0.0.7.dev105.dist-info/METADATA → junifer-0.0.7.dev121.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: junifer
- Version: 0.0.7.dev105
+ Version: 0.0.7.dev121
  Summary: JUelich NeuroImaging FEature extractoR
  Author-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
  Maintainer-email: Fede Raimondo <f.raimondo@fz-juelich.de>, Synchon Mandal <s.mandal@fz-juelich.de>
junifer-0.0.7.dev105.dist-info/RECORD → junifer-0.0.7.dev121.dist-info/RECORD RENAMED
@@ -1,12 +1,12 @@
  junifer/__init__.py,sha256=2McgH1yNue6Z1V26-uN_mfMjbTcx4CLhym-DMBl5xA4,266
  junifer/__init__.pyi,sha256=SsTvgq2Dod6UqJN96GH1lCphH6hJQQurEJHGNhHjGUI,508
- junifer/_version.py,sha256=jFH3nrzs5AspealBCIydRQVFvENTPE2XjxPHECcRoME,528
+ junifer/_version.py,sha256=EcmNCxfnmwXc_dYkB9704nW6N01vTPj7Wl2RQTVG5-c,528
  junifer/conftest.py,sha256=PWYkkRDU8ly2lYwv7VBKMHje4et6HX7Yey3Md_I2KbA,613
  junifer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  junifer/stats.py,sha256=e9aaagMGtgpRfW3Wdpz9ocpnYld1IWylCDcjFUgX9Mk,6225
  junifer/api/__init__.py,sha256=aAXW_KAEGQ8aAP5Eni2G1R4MWBF7UgjKOgM6akLuJco,252
  junifer/api/__init__.pyi,sha256=UJu55ApMFd43N0xlQyNKrYpCdzqhAxA3Jjaj0ETwCXU,169
- junifer/api/decorators.py,sha256=hBmJbJIedXvCxzvxWBQond3Nu9oSTd2e7dvJ_QZ9zF0,3635
+ junifer/api/decorators.py,sha256=7yFhb63zRuyd9uyV_8e7gr4oD0yvZAC4So6CYlwm4tY,4723
  junifer/api/functions.py,sha256=LXKPqsfWINq1iSUShdryGB8hPOyyydc1ldHfr68bP20,14226
  junifer/api/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  junifer/api/queue_context/__init__.py,sha256=glr8x4aMm4EvVrHywDIlugdNlwD1RzqV2FTDNPqYQZ4,204
@@ -40,7 +40,7 @@ junifer/api/res/fsl/flirt,sha256=tSjiUco8ui8AbHD7mTzChEwbR0Rf_4iJTgzYTPF_WuQ,42
  junifer/api/res/fsl/img2imgcoord,sha256=Zmaw3oJYrEltcXiPyEubXry9ppAq3SND52tdDWGgeZk,49
  junifer/api/res/fsl/run_fsl_docker.sh,sha256=pq-fcNdLuvHzVIQePN4GebZGlcE2UF-xj5rBIqAMz4g,1122
  junifer/api/res/fsl/std2imgcoord,sha256=-X5wRH6XMl0yqnTACJX6MFhO8DFOEWg42MHRxGvimXg,49
- junifer/api/tests/test_decorators.py,sha256=GN4wSRDJncqa1Jz9krNY_Ls9e6ge9xaGyOpaiSSXibk,759
+ junifer/api/tests/test_decorators.py,sha256=2-ajsMWqXvGdHDnNU4ueSd5GcJ8ROIYOX8tFrJINbaI,2062
  junifer/api/tests/test_functions.py,sha256=HcJIBCtcgL1xJlDwtGHrGOWBMjXgCoAFoVxQW6n2Tds,20676
  junifer/cli/__init__.py,sha256=LRmpmMe0DdZKYZTV61onUiLLxYZ_ZYSfmRbH55bBJMg,500
  junifer/cli/__init__.pyi,sha256=PiV4znUnzSeuSSJGz-RT8N21PiMqoSMwYcypi7nt2Js,40
@@ -237,17 +237,19 @@ junifer/markers/tests/test_parcel_aggregation.py,sha256=04OqtY_Z-KW4W1jU5K6GeWnL
  junifer/markers/tests/test_sphere_aggregation.py,sha256=HPaLD6xKdewTt0iANz3nYOD7ZI-g7BqMTiRdV-4sM8M,10669
  junifer/onthefly/__init__.py,sha256=TA6tPuw54ynDlumb9Ii-2p59hw2rGoCMe1-vQ89JzZ8,238
  junifer/onthefly/_brainprint.py,sha256=-BswaAV9SLHU8mmWJ2KbPL7FgERJzIQIbSdV-NYiiYI,3802
- junifer/onthefly/read_transform.py,sha256=JfTJIiZnautkJ6DzsjeWQ7AEDRHV4omfinvDkow3FFM,4272
- junifer/onthefly/tests/test_read_transform.py,sha256=Ed6gtj8bsD11fe0Y1AxG2JndtITDSBje4g3hx3wkbAo,4718
+ junifer/onthefly/read_transform.py,sha256=pUwwsO4oBwq6u4ybRpnQ5s6MujtwD_1AOMv-RdavAFg,6690
+ junifer/onthefly/tests/test_read_transform.py,sha256=U8BwImmgH9e2eA_WXVWyKgGzFQNEoD0teCNv2Udlhok,7246
  junifer/pipeline/__init__.py,sha256=rxKQGRwc6_sts1KhVIcVVpuXeiFABf11mQQ2h5jgA3U,194
- junifer/pipeline/__init__.pyi,sha256=hhcvNcABhtLaUQiZdTjo5sMWC3rtDkwVshL0sxD5JAE,399
- junifer/pipeline/marker_collection.py,sha256=1Kmf5f0E2MFhDpO9OBui046b_6h1u9U64AdEqrxso-o,5377
+ junifer/pipeline/__init__.pyi,sha256=T2SzqOHE8bD7j3s2HUZrcVB_To1Sv8HDhXTG9YFsWtM,642
+ junifer/pipeline/_data_object_dumper.py,sha256=UV0h6onoVOIgD2q80XB8OU9Xe8NZYygoaYEoI2mmHmE,10870
+ junifer/pipeline/marker_collection.py,sha256=bVEcrc8Gf3Bm96Ez3FJa6U-NUTVjg4x10x74egbKMQk,7000
  junifer/pipeline/pipeline_component_registry.py,sha256=N80XfOZB33tscuqUlrri0r8sMUGVkPL6Li01Of70qrA,9517
  junifer/pipeline/pipeline_step_mixin.py,sha256=oXfJh27yifHs1V3V_tMPCanRiHX1ggOVIbHTvMzq3cY,7853
  junifer/pipeline/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  junifer/pipeline/update_meta_mixin.py,sha256=yzGCx8AUbc9mMnWKRu4qaIXTBBSIxtNlGH5zIQIUvzM,1812
  junifer/pipeline/utils.py,sha256=qS0Xg_43ri-xtLeMJR838Axj9FkQ6s2H4r8CmSD58X8,10285
  junifer/pipeline/workdir_manager.py,sha256=L9_roiBW1IgCSPVuFhCouGcH2PCLFetk4OKhHIu6bNA,8605
+ junifer/pipeline/tests/test_data_object_dumper.py,sha256=ONn3J21Un5SrJqZWXDHpf-3jeHUxdcSCUbY6I2ABMCk,6252
  junifer/pipeline/tests/test_marker_collection.py,sha256=FwxJvjYQ3mh_e3uFZSlOnuGu0EIx4L-Niqt4UOKU6YM,6968
  junifer/pipeline/tests/test_pipeline_component_registry.py,sha256=mrbz285K_TzSILRn9X-AyzcNXuPRHGBZY6dQiq5_9So,5776
  junifer/pipeline/tests/test_pipeline_step_mixin.py,sha256=KCdhFdThm9TGkUvhGzQF3zR9SoZ9ont1z8yZELB2TtQ,7752
@@ -309,8 +311,8 @@ junifer/testing/tests/test_testing_registry.py,sha256=MK4a_q4MHieCvYhnhuPm_dH76l
  junifer/tests/test_main.py,sha256=GMff7jlisGM9_FsiUwWDte43j-KQJGFRYZpwRRqTkd8,373
  junifer/tests/test_stats.py,sha256=NljoGFu2JOPADbi9W0WeUHwpf8nZSdOkcCgCv-Z1fY4,4149
  junifer/typing/__init__.py,sha256=e0UbuxozXUIxz8h8pLokMOxZV629Q1lnA7vvgm95WF0,215
- junifer/typing/__init__.pyi,sha256=5jzVAkras38Eou5abUvdP1AXhbpCSnPAllLx88YuPB8,640
- junifer/typing/_typing.py,sha256=JogiI9wCZWHuqgTaZarjk89aA5pR0yTvFx2JfheLT_Y,1783
+ junifer/typing/__init__.pyi,sha256=l_AHfe7LkM6lhaUxnlZ5frBxtZeKbblVUFY3yyWLg70,688
+ junifer/typing/_typing.py,sha256=kzlXa-mv2fZytwrmTFGBeod3qipm0zJJTh09iNsoAoA,1854
  junifer/utils/__init__.py,sha256=I3tYaePAD_ZEU-36-TJ_OYeqW_aMmi5MZ3jmqie6RfU,260
  junifer/utils/__init__.pyi,sha256=CMb4rq1VcQ00IRuiBFfAWu07Vb-vA4qtVLAoY0ll-bA,422
  junifer/utils/_config.py,sha256=cfxyv1bfklID2atQseu6y3J7mZrCXPwnGEfBSImG9CM,3054
@@ -324,10 +326,10 @@ junifer/utils/tests/test_config.py,sha256=7ltIXuwb_W4Mv_1dxQWyiyM10XgUAfsWKV6D_i
  junifer/utils/tests/test_fs.py,sha256=WQS7cKlKEZ742CIuiOYYpueeAhY9PqlastfDVpVVtvE,923
  junifer/utils/tests/test_helpers.py,sha256=k5qqfxK8dFyuewTJyR1Qn6-nFaYNuVr0ysc18bfPjyU,929
  junifer/utils/tests/test_logging.py,sha256=W4tFKmaf8_CxnWZ-o_-XxM7DQbhGG18RsLZJk8bZelI,8163
- junifer-0.0.7.dev105.dist-info/licenses/AUTHORS.rst,sha256=rmULKpchpSol4ExWFdm-qu4fkpSZPYqIESVJBZtGb6E,163
- junifer-0.0.7.dev105.dist-info/licenses/LICENSE.md,sha256=MqCnOBu8uXsEOzRZWh9EBVfVz-kE9NkXcLCrtGXo2yU,34354
- junifer-0.0.7.dev105.dist-info/METADATA,sha256=y9qwZWakS8Oc_XurJ3dN1hQ-ndxPyy5BdikbsZsaIG8,8388
- junifer-0.0.7.dev105.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- junifer-0.0.7.dev105.dist-info/entry_points.txt,sha256=6O8ru0BP-SP7YMUZiizFNoaZ2HvJpadO2G7nKk4PwjI,48
- junifer-0.0.7.dev105.dist-info/top_level.txt,sha256=4bAq1R2QFQ4b3hohjys2JBvxrl0GKk5LNFzYvz9VGcA,8
- junifer-0.0.7.dev105.dist-info/RECORD,,
+ junifer-0.0.7.dev121.dist-info/licenses/AUTHORS.rst,sha256=rmULKpchpSol4ExWFdm-qu4fkpSZPYqIESVJBZtGb6E,163
+ junifer-0.0.7.dev121.dist-info/licenses/LICENSE.md,sha256=MqCnOBu8uXsEOzRZWh9EBVfVz-kE9NkXcLCrtGXo2yU,34354
+ junifer-0.0.7.dev121.dist-info/METADATA,sha256=T-IK6ff8Ahg-PIyefj_5qmUGlkzQs9N0weQFGMMlj48,8388
+ junifer-0.0.7.dev121.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ junifer-0.0.7.dev121.dist-info/entry_points.txt,sha256=6O8ru0BP-SP7YMUZiizFNoaZ2HvJpadO2G7nKk4PwjI,48
+ junifer-0.0.7.dev121.dist-info/top_level.txt,sha256=4bAq1R2QFQ4b3hohjys2JBvxrl0GKk5LNFzYvz9VGcA,8
+ junifer-0.0.7.dev121.dist-info/RECORD,,