dcnum 0.13.3-py3-none-any.whl → 0.15.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dcnum/_version.py +2 -2
- dcnum/feat/__init__.py +1 -0
- dcnum/feat/event_extractor_manager_thread.py +3 -0
- dcnum/feat/feat_background/__init__.py +2 -12
- dcnum/feat/feat_background/base.py +51 -33
- dcnum/feat/feat_brightness/__init__.py +1 -0
- dcnum/feat/feat_moments/__init__.py +1 -0
- dcnum/feat/feat_texture/__init__.py +1 -0
- dcnum/feat/gate.py +62 -41
- dcnum/feat/queue_event_extractor.py +80 -40
- dcnum/logic/__init__.py +4 -0
- dcnum/logic/ctrl.py +501 -0
- dcnum/logic/job.py +123 -0
- dcnum/meta/ppid.py +48 -7
- dcnum/read/hdf5_data.py +47 -2
- dcnum/segm/__init__.py +1 -13
- dcnum/segm/segm_thresh.py +1 -0
- dcnum/segm/segmenter.py +58 -17
- dcnum/segm/segmenter_cpu.py +2 -0
- dcnum/segm/segmenter_gpu.py +1 -0
- dcnum/write/deque_writer_thread.py +1 -1
- dcnum/write/writer.py +45 -4
- {dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/METADATA +1 -1
- dcnum-0.15.0.dist-info/RECORD +43 -0
- {dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/WHEEL +1 -1
- dcnum-0.13.3.dist-info/RECORD +0 -40
- {dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/LICENSE +0 -0
- {dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/top_level.txt +0 -0
dcnum/meta/ppid.py
CHANGED
@@ -4,17 +4,38 @@ import collections
 import hashlib
 import inspect
 import pathlib
+from typing import Dict, List, Protocol
+import warnings


 #: Increment this string if there are breaking changes that make
 #: previous pipelines unreproducible.
-DCNUM_PPID_GENERATION = "…
+DCNUM_PPID_GENERATION = "7"


-
-
+class ClassWithPPIDCapabilities(Protocol):
+    def get_ppid(self) -> str:
+        """full pipeline identifier for the class (instance method)"""
+        pass
+
+    def get_ppid_code(self) -> str:
+        """string representing the class in the pipeline (classmethod)"""
+        pass
+
+    def get_ppid_from_ppkw(self) -> str:
+        """pipeline identifier from specific pipeline keywords (classmethod)"""
+        pass
+
+    def get_ppkw_from_ppid(self) -> Dict:
+        """class keywords from full pipeline identifier (staticmethod)"""
+        pass
+
+
+def compute_pipeline_hash(*, bg_id, seg_id, feat_id, gate_id,
+                          dat_id="unknown", gen_id=DCNUM_PPID_GENERATION):
     hasher = hashlib.md5()
-    hasher.update("|".join([…
+    hasher.update("|".join([
+        gen_id, dat_id, bg_id, seg_id, feat_id, gate_id]).encode())
     pph = hasher.hexdigest()
     return pph
@@ -37,7 +58,8 @@ def convert_to_dtype(value, dtype):
     return value


-def get_class_method_info(class_obj, static_kw_methods=None):
+def get_class_method_info(class_obj: ClassWithPPIDCapabilities,
+                          static_kw_methods: List = None):
     """Return dictionary of class info with static keyword methods docs

     Parameters
@@ -49,8 +71,14 @@ def get_class_method_info(class_obj, static_kw_methods=None):
     are extracted.
     """
     doc = class_obj.__doc__ or class_obj.__init__.__doc__
+    if hasattr(class_obj, "key"):
+        warnings.warn(f"{class_obj.__class__} implements `key` which is "
+                      f"deprecated. Please rename to `get_ppid_code`.",
+                      DeprecationWarning)
+        setattr(class_obj, "get_ppid_code", class_obj.key)
     info = {
-        "…
+        "code": class_obj.get_ppid_code(),
+        "key": class_obj.get_ppid_code(),  # Deprecated
         "doc": doc,
         "title": doc.split("\n")[0],
     }
@@ -67,12 +95,25 @@ def get_class_method_info(class_obj, static_kw_methods=None):
     return info


-def kwargs_to_ppid(cls…
+def kwargs_to_ppid(cls: ClassWithPPIDCapabilities,
+                   method: str,
+                   kwargs: Dict,
+                   allow_invalid_keys: bool = True):
     info = get_class_method_info(cls, [method])

     concat_strings = []
     if info["defaults"][method]:
         kwdefaults = info["defaults"][method]
+        kw_false = set(kwargs.keys()) - set(kwdefaults.keys())
+        if kw_false:
+            # This should not have happened.
+            msg = f"Invalid kwargs {kw_false} specified for method " \
+                  f"'{method}'! Valid kwargs are {sorted(kwdefaults.keys())}."
+            if allow_invalid_keys:
+                warnings.warn(msg + " Please cleanup your code!",
+                              DeprecationWarning)
+            else:
+                raise KeyError(msg)
         kwannot = info["annotations"][method]
         kws = list(kwdefaults.keys())
         kws_abrv = get_unique_prefix(kws)
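The new `compute_pipeline_hash` joins the generation string with the per-step identifiers and hashes the result. A minimal standalone sketch of the same idea; the step identifier strings passed to the call are hypothetical placeholders, only the `dat_id` format mirrors the HDF5Data PPID shown further below:

import hashlib

DCNUM_PPID_GENERATION = "7"

def compute_pipeline_hash(*, bg_id, seg_id, feat_id, gate_id,
                          dat_id="unknown", gen_id=DCNUM_PPID_GENERATION):
    # Join generation, data, background, segmentation, feature, and gating
    # identifiers with "|" and return the MD5 hex digest.
    hasher = hashlib.md5()
    hasher.update("|".join(
        [gen_id, dat_id, bg_id, seg_id, feat_id, gate_id]).encode())
    return hasher.hexdigest()

# Placeholder identifiers for illustration only:
print(compute_pipeline_hash(bg_id="bg-id",
                            seg_id="seg-id",
                            feat_id="feat-id",
                            gate_id="gate-id",
                            dat_id="hdf:p=0.2645"))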
dcnum/read/hdf5_data.py
CHANGED
@@ -169,6 +169,11 @@ class HDF5Data:
                 and (self.pixel_size < 0.255 or self.pixel_size > 0.275)):
             warnings.warn(
                 f"Correcting for invalid pixel size in '{self.path}'!")
+            warnings.warn(
+                "Correcting the pixel size is deprecated in dcnum. Please "
+                "make sure your input data are clean before processing.",
+                DeprecationWarning
+            )
             # Set default pixel size for Rivercyte devices
             self.pixel_size = 0.2645
@@ -221,7 +226,10 @@ class HDF5Data:
         return self.meta.get("imaging:pixel size", 0)

     @pixel_size.setter
-    def pixel_size(self, pixel_size):
+    def pixel_size(self, pixel_size: float):
+        # Reduce pixel_size accuracy to 8 digits after the point to
+        # enforce pipeline reproducibility (see get_ppid_from_ppkw).
+        pixel_size = float(f"{pixel_size:.8f}")
         self.meta["imaging:pixel size"] = pixel_size

     @property
@@ -234,7 +242,7 @@ class HDF5Data:
         """
         if self._feats is None:
             feats = []
-            for feat in self.…
+            for feat in self.keys():
                 if feat in PROTECTED_FEATURES:
                     feats.append(feat)
             self._feats = feats
@@ -249,6 +257,33 @@ class HDF5Data:
         self._basin_data.clear()
         self.h5.close()

+    def get_ppid(self):
+        return self.get_ppid_from_ppkw({"pixel_size": self.pixel_size})
+
+    @classmethod
+    def get_ppid_code(cls):
+        return "hdf"
+
+    @classmethod
+    def get_ppid_from_ppkw(cls, kwargs):
+        # Data does not really fit into the PPID scheme we use for the rest
+        # of the pipeline. This implementation here is custom.
+        code = cls.get_ppid_code()
+        kwid = f"p={kwargs['pixel_size']:.8f}".rstrip("0")
+        return ":".join([code, kwid])
+
+    @staticmethod
+    def get_ppkw_from_ppid(dat_ppid):
+        # Data does not fit in the PPID scheme we use, but we still
+        # would like to pass pixel_size to __init__ if we need it.
+        code, pp_dat_kwargs = dat_ppid.split(":")
+        if code != HDF5Data.get_ppid_code():
+            raise ValueError(f"Could not find data method '{code}'!")
+        p, val = pp_dat_kwargs.split("=")
+        if p != "p":
+            raise ValueError(f"Invalid parameter '{p}'!")
+        return {"pixel_size": float(val)}
+
     def get_basin_data(self, index):
         """Return HDF5Data info for a basin index in `self.basins`

@@ -280,6 +315,16 @@ class HDF5Data:
             h5dat = HDF5Data(path)
             features = bn_dict.get("features")
             if features is None:
+                # Only get the features from the actual HDF5 file.
+                # If this file has basins as well, the basin metadata
+                # should have been copied over to the parent file. This
+                # makes things a little cleaner, because basins are not
+                # nested, but all basins are available in the top file.
+                # See :func:`write.store_metadata` for copying metadata
+                # between files.
+                # The writer can still specify "features" in the basin
+                # metadata, then these basins are indeed nested, and
+                # we consider that ok as well.
                 features = sorted(h5dat.h5["events"].keys())
             self._basin_data[index] = (h5dat, features)
         return self._basin_data[index]
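The pixel-size formatting used by `HDF5Data.get_ppid_from_ppkw` and its inverse `get_ppkw_from_ppid` can be reproduced in isolation. A small sketch of the round trip; no HDF5 file is needed and 0.2645 is the Rivercyte default mentioned in the diff above:

# Forward: build the data PPID from keyword arguments (mirrors the diff above)
pixel_size = 0.2645
kwid = f"p={pixel_size:.8f}".rstrip("0")   # "p=0.2645"
dat_ppid = ":".join(["hdf", kwid])         # "hdf:p=0.2645"

# Backward: recover the keyword arguments from the PPID string
code, pp_dat_kwargs = dat_ppid.split(":")
assert code == "hdf"
key, val = pp_dat_kwargs.split("=")
assert key == "p"
kwargs = {"pixel_size": float(val)}
print(dat_ppid, kwargs)  # hdf:p=0.2645 {'pixel_size': 0.2645}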
dcnum/segm/__init__.py
CHANGED
@@ -1,18 +1,6 @@
 # flake8: noqa: F401
-import functools
-
-from .segmenter import Segmenter
+from .segmenter import Segmenter, get_available_segmenters
 from .segmenter_cpu import CPUSegmenter
 from .segmenter_gpu import GPUSegmenter
 from .segmenter_manager_thread import SegmenterManagerThread
 from . import segm_thresh
-
-
-@functools.cache
-def get_available_segmenters():
-    """Return dictionary of available segmenters"""
-    segmenters = {}
-    for scls in Segmenter.__subclasses__():
-        for cls in scls.__subclasses__():
-            segmenters[cls.key()] = cls
-    return segmenters
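With this change, `get_available_segmenters` is defined in `dcnum.segm.segmenter` (see the diff for that module below) and only re-exported here, so the old import path keeps working. A hedged usage sketch; the exact set of returned segmenters depends on which subclasses are defined in the installed version:

from dcnum.segm import get_available_segmenters

# Maps each segmenter code (from get_ppid_code) to its class.
segmenters = get_available_segmenters()
for code, cls in segmenters.items():
    print(code, cls.__name__, cls.hardware_processor)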
dcnum/segm/segm_thresh.py
CHANGED
dcnum/segm/segmenter.py
CHANGED
@@ -1,17 +1,21 @@
 import abc
+import copy
 import functools
 import inspect
 import logging
+import warnings

 import cv2
 import numpy as np
 import scipy.ndimage as ndi
 from skimage import morphology

-from ..meta.ppid import kwargs_to_ppid
+from ..meta.ppid import kwargs_to_ppid, ppid_to_kwargs


 class Segmenter(abc.ABC):
+    #: Required hardware ("cpu" or "gpu") defined in first-level subclass.
+    hardware_processor = "none"
     #: Whether to enable mask post-processing. If disabled, you should
     #: make sure that your mask is properly defined and cleaned or you
     #: have to call `process_mask` in your `segment_approach` implementation.
@@ -55,13 +59,11 @@ class Segmenter(abc.ABC):
                 "`kwargs_mask` has been specified, but mask post-processing "
                 f"is disabled for segmenter {self.__class__}")

-    @…
-    …
-        key = key[7:]
-        return key
+    @staticmethod
+    @functools.cache
+    def get_disk(radius):
+        """Cached `skimage.morphology.disk(radius)`"""
+        return morphology.disk(radius)

     def get_ppid(self):
         """Return a unique segmentation pipeline identifier
@@ -85,36 +87,58 @@ class Segmenter(abc.ABC):

         KW_MASK represents keyword arguments for `process_mask`.
         """
-        return self.…
+        return self.get_ppid_from_ppkw(self.kwargs, self.kwargs_mask)
+
+    @classmethod
+    def get_ppid_code(cls):
+        """The unique code/name of this segmenter class"""
+        code = cls.__name__.lower()
+        if code.startswith("segment"):
+            code = code[7:]
+        return code

     @classmethod
-    def …
+    def get_ppid_from_ppkw(cls, kwargs, kwargs_mask=None):
         """Return the pipeline ID from given keyword arguments

         See Also
         --------
         get_ppid: Same method for class instances
         """
+        kwargs = copy.deepcopy(kwargs)
         if kwargs_mask is None and kwargs.get("kwargs_mask", None) is None:
             raise KeyError("`kwargs_mask` must be either specified as "
                            "keyword argument to this method or as a key "
                            "in `kwargs`!")
         if kwargs_mask is None:
             # see check above (kwargs_mask may also be {})
-            kwargs_mask = kwargs…
+            kwargs_mask = kwargs.pop("kwargs_mask")
         # Start with the default mask kwargs defined for this subclass
-        kwargs_mask_used = cls.mask_default_kwargs
+        kwargs_mask_used = copy.deepcopy(cls.mask_default_kwargs)
         kwargs_mask_used.update(kwargs_mask)
-        key = cls.…
+        key = cls.get_ppid_code()
         csegm = kwargs_to_ppid(cls, "segment_approach", kwargs)
         cmask = kwargs_to_ppid(cls, "process_mask", kwargs_mask_used)
         return ":".join([key, csegm, cmask])

     @staticmethod
-    …
+    def get_ppkw_from_ppid(segm_ppid):
+        """Return keyword arguments for this pipeline identifier"""
+        code, pp_kwargs, pp_kwargs_mask = segm_ppid.split(":")
+        for cls_code in get_available_segmenters():
+            if cls_code == code:
+                cls = get_available_segmenters()[cls_code]
+                break
+        else:
+            raise ValueError(
+                f"Could not find segmenter '{code}'!")
+        kwargs = ppid_to_kwargs(cls=cls,
+                                method="segment_approach",
+                                ppid=pp_kwargs)
+        kwargs["kwargs_mask"] = ppid_to_kwargs(cls=cls,
+                                               method="process_mask",
+                                               ppid=pp_kwargs_mask)
+        return kwargs

     @staticmethod
     def process_mask(labels, *,
@@ -240,3 +264,20 @@ class Segmenter(abc.ABC):
     @abc.abstractmethod
     def segment_batch(self, data, start=None, stop=None):
         """Return the integer labels for an entire batch"""
+
+    @classmethod
+    def get_ppid_from_kwargs(cls, *args, **kwargs):
+        warnings.warn(
+            "Please use get_ppid_from_ppkw instead of get_ppid_from_kwargs.",
+            DeprecationWarning)
+        return cls.get_ppid_from_ppkw(*args, **kwargs)
+
+
+@functools.cache
+def get_available_segmenters():
+    """Return dictionary of available segmenters"""
+    segmenters = {}
+    for scls in Segmenter.__subclasses__():
+        for cls in scls.__subclasses__():
+            segmenters[cls.get_ppid_code()] = cls
+    return segmenters
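As the docstring above states, a segmenter PPID has three colon-separated parts: the segmenter code, the `segment_approach` keywords, and the `process_mask` keywords. A minimal parsing sketch following the first step of `get_ppkw_from_ppid`; the example string is hypothetical and only illustrates the code:KW_APPROACH:KW_MASK layout, not an actual dcnum identifier:

# Hypothetical segmenter PPID with the code:KW_APPROACH:KW_MASK layout
segm_ppid = "thresh:t=-6:cle=1^f=1^clo=2"

code, pp_kwargs, pp_kwargs_mask = segm_ppid.split(":")
print(code)            # segmenter code, looked up via get_available_segmenters
print(pp_kwargs)       # keyword part for segment_approach
print(pp_kwargs_mask)  # keyword part for process_mask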
dcnum/segm/segmenter_cpu.py
CHANGED
@@ -14,6 +14,8 @@ mp_spawn = mp.get_context('spawn')


 class CPUSegmenter(Segmenter, abc.ABC):
+    hardware_processor = "cpu"
+
     def __init__(self, num_workers=None, *args, **kwargs):
         super(CPUSegmenter, self).__init__(*args, **kwargs)
         self.num_workers = num_workers or mp.cpu_count()
dcnum/segm/segmenter_gpu.py
CHANGED
dcnum/write/writer.py
CHANGED
@@ -2,6 +2,7 @@ import hashlib
 import json
 import pathlib
 from typing import List
+import warnings

 import h5py
 import hdf5plugin
@@ -10,6 +11,11 @@ import numpy as np
 from .._version import version


+class CreatingFileWithoutBasinWarning(UserWarning):
+    """Issued when creating a basin-based dataset without basins"""
+    pass
+
+
 class HDF5Writer:
     def __init__(self, path, mode="a", ds_kwds=None):
         """Write deformability cytometry HDF5 data"""
@@ -136,6 +142,33 @@
         ds.resize(offset + dsize, axis=0)
         ds[offset:offset + dsize] = data

+    def store_log(self,
+                  log: str,
+                  data: List[str],
+                  override: bool = False):
+        """Store log data
+
+        Store the log data under the key `log`. The `data`
+        kwarg must be a list of strings. If the log entry
+        already exists, `ValueError` is raised unless
+        `override` is set to True.
+        """
+        logs = self.h5.require_group("logs")
+        if log in logs:
+            if override:
+                del logs[log]
+            else:
+                raise ValueError(
+                    f"Log '{log}' already exists in {self.h5.filename}!")
+        logs.create_dataset(
+            name=log,
+            data=data,
+            shape=(len(data),),
+            # maximum line length
+            dtype=f"S{max([len(ll) for ll in data])}",
+            chunks=True,
+            **self.ds_kwds)
+

 def create_with_basins(
         path_out: str | pathlib.Path,
@@ -154,6 +187,10 @@
         commonly used for relative and absolute paths).
     """
     path_out = pathlib.Path(path_out)
+    if not basin_paths:
+        warnings.warn(f"Creating basin-based file '{path_out}' without any "
+                      f"basins, since the list `basin_paths' is empty!",
+                      CreatingFileWithoutBasinWarning)
     with HDF5Writer(path_out, mode="w") as hw:
         # Get the metadata from the first available basin path

@@ -184,7 +221,7 @@
         # Copy the metadata from the representative path.
         if prep is not None:
             # copy metadata
-            with h5py.File(prep) as h5:
+            with h5py.File(prep, libver="latest") as h5:
                 copy_metadata(h5_src=h5, h5_dst=hw.h5)
                 # extract features
                 features = sorted(h5["events"].keys())
@@ -202,13 +239,14 @@


 def copy_metadata(h5_src: h5py.File,
-                  h5_dst: h5py.File):
+                  h5_dst: h5py.File,
+                  copy_basins=True):
     """Copy attributes, tables, logs, and basins from one H5File to another

     Notes
     -----
     Metadata in `h5_dst` are never overridden, only metadata that
-    are not defined are added.
+    are not defined already are added.
     """
     # compress data
     ds_kwds = {}
@@ -219,8 +257,11 @@ def copy_metadata(h5_src: h5py.File,
     src_attrs = dict(h5_src.attrs)
     for kk in src_attrs:
         h5_dst.attrs.setdefault(kk, src_attrs[kk])
+    copy_data = ["logs", "tables"]
+    if copy_basins:
+        copy_data.append("basins")
     # copy other metadata
-    for topic in …
+    for topic in copy_data:
         if topic in h5_src:
             for key in h5_src[topic]:
                 h5_dst.require_group(topic)
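The new `HDF5Writer.store_log` stores a list of strings as a fixed-width byte-string dataset under `logs/<name>` and raises `ValueError` if the log already exists and `override` is False. A minimal usage sketch, assuming a writable working directory; the output file name and log content are made up for illustration:

from dcnum.write.writer import HDF5Writer

# HDF5Writer is used as a context manager, as in create_with_basins above.
with HDF5Writer("example_out.rtdc", mode="w") as hw:
    hw.store_log("dcnum-log", ["started pipeline", "finished pipeline"])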
dcnum-0.15.0.dist-info/RECORD
ADDED
@@ -0,0 +1,43 @@
+dcnum/__init__.py,sha256=hcawIKS7utYiOyVhOAX9t7K3xYzP1b9862VV0b6qSrQ,74
+dcnum/_version.py,sha256=oHv-EAjiXbJma3jZ0Tq6UPimiWYyyw2Ao9S8zdq9uWs,413
+dcnum/feat/__init__.py,sha256=JqlgzOgDJhoTk8WVYcIiKTWq9EAM16_jGivzOtN6JGo,325
+dcnum/feat/event_extractor_manager_thread.py,sha256=V2idRAlC7bdsA8I40RAUkqz3jtWmTeb4cjPXpRjr8Ik,6145
+dcnum/feat/gate.py,sha256=srobj5p2RDr_S2SUtbwGbTKatnc_aPSndt0cR2P9zoY,7060
+dcnum/feat/queue_event_extractor.py,sha256=RYz0VNtV8OVGDFn9MrYWM5NaB4rXLiJch9MTKDnccs0,14453
+dcnum/feat/feat_background/__init__.py,sha256=nIKAGKBFYlLvm8UdkEzBB6V_FC_lhxo3xG-u-p2y3es,305
+dcnum/feat/feat_background/base.py,sha256=N1SL5NCZ7gTS5AQONxEH31PFJBx0zvVjCaA4mprheuY,7974
+dcnum/feat/feat_background/bg_roll_median.py,sha256=E86AiSzpw0RZ0nYL4UdKKkskS5ywKJCLeHlWYfTPS0k,12781
+dcnum/feat/feat_background/bg_sparse_median.py,sha256=COJeY8suHmrHBWiXxTLw3C17-QOnc4R75GMdZiuU3cc,17670
+dcnum/feat/feat_brightness/__init__.py,sha256=o6AebVlmydwNgVF5kW6ITqJyFreoKrU3Ki_3EC8If-s,155
+dcnum/feat/feat_brightness/bright_all.py,sha256=Z5b-xkw7g7ejMpbGmdUqrxGRymqFhAQsZ938gaGXk9Y,3102
+dcnum/feat/feat_brightness/common.py,sha256=JX49EszYDmnvoOKXFVV1CalEIWRmOuY5EryNbqGbdac,156
+dcnum/feat/feat_moments/__init__.py,sha256=9eKmhBZGAZTLRnfCNHDiYM0a7qErrJCFLEgy3OlF9no,125
+dcnum/feat/feat_moments/ct_opencv.py,sha256=_qyHCGvylVxruMWafvVbVOzhWGXLoFi10LReNxGcWhY,463
+dcnum/feat/feat_moments/mt_legacy.py,sha256=tp85oeQ1GwVNdo6nXWhtbUGjMaXR8C6NMMWhobzThq0,4490
+dcnum/feat/feat_texture/__init__.py,sha256=6StM9S540UVtdFFR3bHa7nfCTomeVdoo7Uy9CjuTgH0,137
+dcnum/feat/feat_texture/common.py,sha256=COXHpXS-7DMouGu3WF83I76L02Sr7P9re4lxajh6g0E,439
+dcnum/feat/feat_texture/tex_all.py,sha256=eGjjNfPpfZw7FA_VNFCIMiU38KD0qcGbxLciYy-tCiA,4097
+dcnum/logic/__init__.py,sha256=5hgAQMp2YGsqpWoeTQ9qxGAWfxPOKQjJsYyNsS49t0g,131
+dcnum/logic/ctrl.py,sha256=dTXS7KY3fEq6SVA-e37h7b3doOsk85dVLb9EOOFV9vU,20263
+dcnum/logic/job.py,sha256=M0Q-Rfcm-zkTXTQc79W6YSNUjUlgmRPG0Ikbdn1aOpY,4608
+dcnum/meta/__init__.py,sha256=cQT_HN5yDKzMnZM8CUyNmeA68OhE3ENO_rvFmgDj95c,40
+dcnum/meta/ppid.py,sha256=eM3MwNslQEuZmRM5-hyi7PDiOrbdfJF14-Fob6n_n2U,7615
+dcnum/read/__init__.py,sha256=iV2wrBMdwJgpXaphNiiAVybndDzTTv0CAGRNXyvxcLY,157
+dcnum/read/cache.py,sha256=mr2DBJZYgNIAiz64TQ4cgkPmRt8nJWBvgkOpaz-p6Yg,5467
+dcnum/read/const.py,sha256=SVlvEJiRIHyTyUlWG24_ogcnT5nTxCi0CRslNuNP56I,282
+dcnum/read/hdf5_data.py,sha256=jUPoXgn52eJZrF4uOpR2-fBLaQX9Ezw3tcuAlI5LnF8,18817
+dcnum/segm/__init__.py,sha256=iiq_1A9DU5wMUcKnsZ53E7NyzCkbZCJeUDimzunE-OM,247
+dcnum/segm/segm_thresh.py,sha256=aLVTydPjbrgKDkZFY3Ew5CX-miwOw71meHfxcO5EjCc,1176
+dcnum/segm/segmenter.py,sha256=Woq7j0AmsB-1m1baSIiOt0q8PJi0nJrHrNRcrdzIYoE,10590
+dcnum/segm/segmenter_cpu.py,sha256=3XMW_uOoFjYrY-JPwyOM9t_RZFlqDbplQ5HbJWkGZT0,10104
+dcnum/segm/segmenter_gpu.py,sha256=-EUe2bN-nNbhJ7tiagtOvw38pO6igw38O9-l4kpfT5I,1267
+dcnum/segm/segmenter_manager_thread.py,sha256=xtuk7gnk7xhoRoV_J97rrv7IR3JgeRvVewCDT-chqpk,5172
+dcnum/write/__init__.py,sha256=6vAQECatcd7DJMXFEuab1wdvEiaxisbY8_qmK5tzIwY,207
+dcnum/write/deque_writer_thread.py,sha256=R4x3p-HZUls3upCBX3vV1VqSdSmaiHdrAswMJj_tVpk,1643
+dcnum/write/queue_collector_thread.py,sha256=BivSe5ZA-rTPH4sridXU1yFB6CP7LYzIFudLMbN481s,11793
+dcnum/write/writer.py,sha256=8DB4O14tXFisiMDdHawHBdQHOg_uXZkRFbgXNdWdCHQ,10167
+dcnum-0.15.0.dist-info/LICENSE,sha256=YRChA1C8A2E-amJbudwMcbTCZy_HzmeY0hMIvduh1MM,1089
+dcnum-0.15.0.dist-info/METADATA,sha256=ITzdZGcaKxRw_llMIvman5gh8uOuJ7Ryn2W9raEMy1E,2172
+dcnum-0.15.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
+dcnum-0.15.0.dist-info/top_level.txt,sha256=Hmh38rgG_MFTVDpUDGuO2HWTSq80P585Het4COQzFTg,6
+dcnum-0.15.0.dist-info/RECORD,,
dcnum-0.13.3.dist-info/RECORD
DELETED
@@ -1,40 +0,0 @@
-dcnum/__init__.py,sha256=hcawIKS7utYiOyVhOAX9t7K3xYzP1b9862VV0b6qSrQ,74
-dcnum/_version.py,sha256=VXQPyzbmOGe0nuuEEus9EQY40THABR5BB9WWse8Z8LI,413
-dcnum/feat/__init__.py,sha256=0oX765AyfL1BcVt-FI6R_i6x5LgYYLeyO5lkxSauI0Y,299
-dcnum/feat/event_extractor_manager_thread.py,sha256=5vcCzAnyg59bpNj-8IV8YM7uHTzqs5du3-KgORNwDok,6020
-dcnum/feat/gate.py,sha256=UEHbj3hkMWNm4tlY8Tz8sOsruhByjJxgO1s-ztQ7WTw,6235
-dcnum/feat/queue_event_extractor.py,sha256=RdA-8OW0uB8svAFRQtUBMp7GBKPisEFvX7mc-RfisqE,13170
-dcnum/feat/feat_background/__init__.py,sha256=mL8QJYK6m3hxTqF6Cuosu__Fm5tZUMa-hTgSGcNw9AE,458
-dcnum/feat/feat_background/base.py,sha256=XcefqydfyOt9vNnIjOOIljdLbN78amTM1oGU65mgg5Y,7367
-dcnum/feat/feat_background/bg_roll_median.py,sha256=E86AiSzpw0RZ0nYL4UdKKkskS5ywKJCLeHlWYfTPS0k,12781
-dcnum/feat/feat_background/bg_sparse_median.py,sha256=COJeY8suHmrHBWiXxTLw3C17-QOnc4R75GMdZiuU3cc,17670
-dcnum/feat/feat_brightness/__init__.py,sha256=j-Gen6zutc74VopPGJsr4eHS-_CRnGnnB73HjKsz1C4,102
-dcnum/feat/feat_brightness/bright_all.py,sha256=Z5b-xkw7g7ejMpbGmdUqrxGRymqFhAQsZ938gaGXk9Y,3102
-dcnum/feat/feat_brightness/common.py,sha256=JX49EszYDmnvoOKXFVV1CalEIWRmOuY5EryNbqGbdac,156
-dcnum/feat/feat_moments/__init__.py,sha256=RxDTbl-XVVk8HIgihTuqWdmD0ciNGdfg715ShHEGUHs,68
-dcnum/feat/feat_moments/ct_opencv.py,sha256=_qyHCGvylVxruMWafvVbVOzhWGXLoFi10LReNxGcWhY,463
-dcnum/feat/feat_moments/mt_legacy.py,sha256=tp85oeQ1GwVNdo6nXWhtbUGjMaXR8C6NMMWhobzThq0,4490
-dcnum/feat/feat_texture/__init__.py,sha256=SjYRb917PrFN231M2nVxq12DDH2y72WocsS9yY7xqaI,84
-dcnum/feat/feat_texture/common.py,sha256=COXHpXS-7DMouGu3WF83I76L02Sr7P9re4lxajh6g0E,439
-dcnum/feat/feat_texture/tex_all.py,sha256=eGjjNfPpfZw7FA_VNFCIMiU38KD0qcGbxLciYy-tCiA,4097
-dcnum/meta/__init__.py,sha256=cQT_HN5yDKzMnZM8CUyNmeA68OhE3ENO_rvFmgDj95c,40
-dcnum/meta/ppid.py,sha256=tHbn7rZWEQYCoMzvt8QXhnBDEY6cRJlKxMlbmMwFhVM,5951
-dcnum/read/__init__.py,sha256=iV2wrBMdwJgpXaphNiiAVybndDzTTv0CAGRNXyvxcLY,157
-dcnum/read/cache.py,sha256=mr2DBJZYgNIAiz64TQ4cgkPmRt8nJWBvgkOpaz-p6Yg,5467
-dcnum/read/const.py,sha256=SVlvEJiRIHyTyUlWG24_ogcnT5nTxCi0CRslNuNP56I,282
-dcnum/read/hdf5_data.py,sha256=Ugu_whnsOmgQStuB08sCb22xvBCxSJXJrWiL2JBWMTg,16686
-dcnum/segm/__init__.py,sha256=BNFn7VvWsRmOzxCtvr4AXi3mmWsJSsQlEmGlq7umCac,510
-dcnum/segm/segm_thresh.py,sha256=fim5HRNWq0DUhVRwLf6nmu4gOHAFtydGDRS1Ww_XzFo,1134
-dcnum/segm/segmenter.py,sha256=o5G9JnHG-dHnNwG8IdPK8MideF0IlSDbuYwCdrSr31w,9002
-dcnum/segm/segmenter_cpu.py,sha256=oFiK73swm7UVPZcTLX4cpFHxp1DIyn9AjQ93JB0_Eh4,10072
-dcnum/segm/segmenter_gpu.py,sha256=RKas-IOZqyg99nvjUUD2NuNTd1sYqjzjHEJeunmmjaY,1236
-dcnum/segm/segmenter_manager_thread.py,sha256=xtuk7gnk7xhoRoV_J97rrv7IR3JgeRvVewCDT-chqpk,5172
-dcnum/write/__init__.py,sha256=6vAQECatcd7DJMXFEuab1wdvEiaxisbY8_qmK5tzIwY,207
-dcnum/write/deque_writer_thread.py,sha256=UUn5OYxDvckvhLw3llLYu7y8MI7RfsOhdJhMonKKB3k,1625
-dcnum/write/queue_collector_thread.py,sha256=BivSe5ZA-rTPH4sridXU1yFB6CP7LYzIFudLMbN481s,11793
-dcnum/write/writer.py,sha256=_6aSz6vJYQfgUftwIiIHEtKMAAm83WcaH_OgplqRGzk,8785
-dcnum-0.13.3.dist-info/LICENSE,sha256=YRChA1C8A2E-amJbudwMcbTCZy_HzmeY0hMIvduh1MM,1089
-dcnum-0.13.3.dist-info/METADATA,sha256=oP0SRQ3OIklXExsoAR8U9hYVlOtU4fDuG3_s9wwtEek,2172
-dcnum-0.13.3.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92
-dcnum-0.13.3.dist-info/top_level.txt,sha256=Hmh38rgG_MFTVDpUDGuO2HWTSq80P585Het4COQzFTg,6
-dcnum-0.13.3.dist-info/RECORD,,
{dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/LICENSE
File without changes
{dcnum-0.13.3.dist-info → dcnum-0.15.0.dist-info}/top_level.txt
File without changes