dclab 0.67.0__cp314-cp314-macosx_10_13_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dclab might be problematic. Click here for more details.
- dclab/__init__.py +41 -0
- dclab/_version.py +34 -0
- dclab/cached.py +97 -0
- dclab/cli/__init__.py +10 -0
- dclab/cli/common.py +237 -0
- dclab/cli/task_compress.py +126 -0
- dclab/cli/task_condense.py +223 -0
- dclab/cli/task_join.py +229 -0
- dclab/cli/task_repack.py +98 -0
- dclab/cli/task_split.py +154 -0
- dclab/cli/task_tdms2rtdc.py +186 -0
- dclab/cli/task_verify_dataset.py +75 -0
- dclab/definitions/__init__.py +79 -0
- dclab/definitions/feat_const.py +202 -0
- dclab/definitions/feat_logic.py +182 -0
- dclab/definitions/meta_const.py +252 -0
- dclab/definitions/meta_logic.py +111 -0
- dclab/definitions/meta_parse.py +94 -0
- dclab/downsampling.cpython-314-darwin.so +0 -0
- dclab/downsampling.pyx +230 -0
- dclab/external/__init__.py +4 -0
- dclab/external/packaging/LICENSE +3 -0
- dclab/external/packaging/LICENSE.APACHE +177 -0
- dclab/external/packaging/LICENSE.BSD +23 -0
- dclab/external/packaging/__init__.py +6 -0
- dclab/external/packaging/_structures.py +61 -0
- dclab/external/packaging/version.py +505 -0
- dclab/external/skimage/LICENSE +28 -0
- dclab/external/skimage/__init__.py +2 -0
- dclab/external/skimage/_find_contours.py +216 -0
- dclab/external/skimage/_find_contours_cy.cpython-314-darwin.so +0 -0
- dclab/external/skimage/_find_contours_cy.pyx +188 -0
- dclab/external/skimage/_pnpoly.cpython-314-darwin.so +0 -0
- dclab/external/skimage/_pnpoly.pyx +99 -0
- dclab/external/skimage/_shared/__init__.py +1 -0
- dclab/external/skimage/_shared/geometry.cpython-314-darwin.so +0 -0
- dclab/external/skimage/_shared/geometry.pxd +6 -0
- dclab/external/skimage/_shared/geometry.pyx +55 -0
- dclab/external/skimage/measure.py +7 -0
- dclab/external/skimage/pnpoly.py +53 -0
- dclab/external/statsmodels/LICENSE +35 -0
- dclab/external/statsmodels/__init__.py +6 -0
- dclab/external/statsmodels/nonparametric/__init__.py +1 -0
- dclab/external/statsmodels/nonparametric/_kernel_base.py +203 -0
- dclab/external/statsmodels/nonparametric/kernel_density.py +165 -0
- dclab/external/statsmodels/nonparametric/kernels.py +36 -0
- dclab/features/__init__.py +9 -0
- dclab/features/bright.py +81 -0
- dclab/features/bright_bc.py +93 -0
- dclab/features/bright_perc.py +63 -0
- dclab/features/contour.py +161 -0
- dclab/features/emodulus/__init__.py +339 -0
- dclab/features/emodulus/load.py +252 -0
- dclab/features/emodulus/lut_HE-2D-FEM-22.txt +16432 -0
- dclab/features/emodulus/lut_HE-3D-FEM-22.txt +1276 -0
- dclab/features/emodulus/lut_LE-2D-FEM-19.txt +13082 -0
- dclab/features/emodulus/pxcorr.py +135 -0
- dclab/features/emodulus/scale_linear.py +247 -0
- dclab/features/emodulus/viscosity.py +260 -0
- dclab/features/fl_crosstalk.py +95 -0
- dclab/features/inert_ratio.py +377 -0
- dclab/features/volume.py +242 -0
- dclab/http_utils.py +322 -0
- dclab/isoelastics/__init__.py +468 -0
- dclab/isoelastics/iso_HE-2D-FEM-22-area_um-deform.txt +2440 -0
- dclab/isoelastics/iso_HE-2D-FEM-22-volume-deform.txt +2635 -0
- dclab/isoelastics/iso_HE-3D-FEM-22-area_um-deform.txt +1930 -0
- dclab/isoelastics/iso_HE-3D-FEM-22-volume-deform.txt +2221 -0
- dclab/isoelastics/iso_LE-2D-FEM-19-area_um-deform.txt +2151 -0
- dclab/isoelastics/iso_LE-2D-FEM-19-volume-deform.txt +2250 -0
- dclab/isoelastics/iso_LE-2D-ana-18-area_um-deform.txt +1266 -0
- dclab/kde/__init__.py +1 -0
- dclab/kde/base.py +459 -0
- dclab/kde/contours.py +222 -0
- dclab/kde/methods.py +313 -0
- dclab/kde_contours.py +10 -0
- dclab/kde_methods.py +11 -0
- dclab/lme4/__init__.py +5 -0
- dclab/lme4/lme4_template.R +94 -0
- dclab/lme4/rsetup.py +204 -0
- dclab/lme4/wrapr.py +386 -0
- dclab/polygon_filter.py +398 -0
- dclab/rtdc_dataset/__init__.py +15 -0
- dclab/rtdc_dataset/check.py +902 -0
- dclab/rtdc_dataset/config.py +533 -0
- dclab/rtdc_dataset/copier.py +353 -0
- dclab/rtdc_dataset/core.py +896 -0
- dclab/rtdc_dataset/export.py +867 -0
- dclab/rtdc_dataset/feat_anc_core/__init__.py +24 -0
- dclab/rtdc_dataset/feat_anc_core/af_basic.py +75 -0
- dclab/rtdc_dataset/feat_anc_core/af_emodulus.py +160 -0
- dclab/rtdc_dataset/feat_anc_core/af_fl_max_ctc.py +133 -0
- dclab/rtdc_dataset/feat_anc_core/af_image_contour.py +113 -0
- dclab/rtdc_dataset/feat_anc_core/af_ml_class.py +102 -0
- dclab/rtdc_dataset/feat_anc_core/ancillary_feature.py +320 -0
- dclab/rtdc_dataset/feat_anc_ml/__init__.py +32 -0
- dclab/rtdc_dataset/feat_anc_plugin/__init__.py +3 -0
- dclab/rtdc_dataset/feat_anc_plugin/plugin_feature.py +329 -0
- dclab/rtdc_dataset/feat_basin.py +762 -0
- dclab/rtdc_dataset/feat_temp.py +102 -0
- dclab/rtdc_dataset/filter.py +263 -0
- dclab/rtdc_dataset/fmt_dcor/__init__.py +7 -0
- dclab/rtdc_dataset/fmt_dcor/access_token.py +52 -0
- dclab/rtdc_dataset/fmt_dcor/api.py +173 -0
- dclab/rtdc_dataset/fmt_dcor/base.py +299 -0
- dclab/rtdc_dataset/fmt_dcor/basin.py +73 -0
- dclab/rtdc_dataset/fmt_dcor/logs.py +26 -0
- dclab/rtdc_dataset/fmt_dcor/tables.py +66 -0
- dclab/rtdc_dataset/fmt_dict.py +103 -0
- dclab/rtdc_dataset/fmt_hdf5/__init__.py +6 -0
- dclab/rtdc_dataset/fmt_hdf5/base.py +192 -0
- dclab/rtdc_dataset/fmt_hdf5/basin.py +30 -0
- dclab/rtdc_dataset/fmt_hdf5/events.py +276 -0
- dclab/rtdc_dataset/fmt_hdf5/feat_defect.py +164 -0
- dclab/rtdc_dataset/fmt_hdf5/logs.py +33 -0
- dclab/rtdc_dataset/fmt_hdf5/tables.py +60 -0
- dclab/rtdc_dataset/fmt_hierarchy/__init__.py +11 -0
- dclab/rtdc_dataset/fmt_hierarchy/base.py +278 -0
- dclab/rtdc_dataset/fmt_hierarchy/events.py +146 -0
- dclab/rtdc_dataset/fmt_hierarchy/hfilter.py +140 -0
- dclab/rtdc_dataset/fmt_hierarchy/mapper.py +134 -0
- dclab/rtdc_dataset/fmt_http.py +102 -0
- dclab/rtdc_dataset/fmt_s3.py +354 -0
- dclab/rtdc_dataset/fmt_tdms/__init__.py +476 -0
- dclab/rtdc_dataset/fmt_tdms/event_contour.py +264 -0
- dclab/rtdc_dataset/fmt_tdms/event_image.py +220 -0
- dclab/rtdc_dataset/fmt_tdms/event_mask.py +62 -0
- dclab/rtdc_dataset/fmt_tdms/event_trace.py +146 -0
- dclab/rtdc_dataset/fmt_tdms/exc.py +37 -0
- dclab/rtdc_dataset/fmt_tdms/naming.py +151 -0
- dclab/rtdc_dataset/load.py +77 -0
- dclab/rtdc_dataset/meta_table.py +25 -0
- dclab/rtdc_dataset/writer.py +1019 -0
- dclab/statistics.py +226 -0
- dclab/util.py +176 -0
- dclab/warn.py +15 -0
- dclab-0.67.0.dist-info/METADATA +153 -0
- dclab-0.67.0.dist-info/RECORD +142 -0
- dclab-0.67.0.dist-info/WHEEL +6 -0
- dclab-0.67.0.dist-info/entry_points.txt +8 -0
- dclab-0.67.0.dist-info/licenses/LICENSE +283 -0
- dclab-0.67.0.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,60 @@
|
|
|
1
|
+
from ..meta_table import MetaTable
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class H5Tables:
    """Dict-like access to the non-empty tables of an HDF5 file.

    Only tables that actually contain data (dataset ``size`` is
    non-zero) are listed; the list of names is computed once and
    cached.
    """

    def __init__(self, h5):
        # underlying open HDF5 file (h5py.File-like)
        self.h5file = h5
        # lazily-populated list of non-empty table names
        self._cache_keys = None

    def __getitem__(self, key):
        """Return an :class:`H5Table` for the table named `key`."""
        if key not in self.keys():
            raise KeyError(f"Table '{key}' not found or empty "
                           f"in {self.h5file.file.filename}!")
        return H5Table(self.h5file["tables"][key])

    def __iter__(self):
        # dict-like behavior: iterate over table names
        yield from self.keys()

    def __len__(self):
        return len(self.keys())

    def keys(self):
        """Return the names of all tables that contain data."""
        if self._cache_keys is None:
            if "tables" in self.h5file:
                group = self.h5file["tables"]
                self._cache_keys = [name for name in group
                                    if group[name].size]
            else:
                self._cache_keys = []
        return self._cache_keys
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class H5Table(MetaTable):
    """Read-only table backed by a single HDF5 dataset."""

    def __init__(self, h5_ds):
        # underlying HDF5 dataset holding the table data
        self._h5_ds = h5_ds
        # caches for the column names and the attribute metadata
        self._keys = None
        self._meta = None

    def __array__(self, *args, **kwargs):
        # delegate array conversion straight to the HDF5 dataset
        return self._h5_ds.__array__(*args, **kwargs)

    def __getitem__(self, key):
        return self._h5_ds[key]

    @property
    def meta(self):
        """Metadata (HDF5 attributes) stored alongside the table"""
        if self._meta is None:
            self._meta = dict(self._h5_ds.attrs)
        return self._meta

    def has_graphs(self):
        """Whether the table defines named columns"""
        return self.keys() is not None

    def keys(self):
        """Column names of the table (`None` for unstructured dtypes)"""
        if self._keys is None:
            self._keys = self._h5_ds.dtype.names
        return self._keys
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
# flake8: noqa: F401
|
|
2
|
+
from .base import RTDC_Hierarchy
|
|
3
|
+
from .events import (
|
|
4
|
+
ChildTrace, ChildTraceItem, ChildScalar, ChildContour, ChildNDArray,
|
|
5
|
+
ChildBase
|
|
6
|
+
)
|
|
7
|
+
from .hfilter import HierarchyFilter, HierarchyFilterError
|
|
8
|
+
from .mapper import (
|
|
9
|
+
map_indices_child2parent, map_indices_child2root,
|
|
10
|
+
map_indices_root2child, map_indices_parent2child
|
|
11
|
+
)
|
|
@@ -0,0 +1,278 @@
|
|
|
1
|
+
"""RT-DC hierarchy format"""
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import numpy as np
|
|
5
|
+
|
|
6
|
+
from ... import definitions as dfn
|
|
7
|
+
from ...util import hashobj
|
|
8
|
+
|
|
9
|
+
from ..config import Configuration
|
|
10
|
+
from ..core import RTDCBase
|
|
11
|
+
|
|
12
|
+
from .events import (
|
|
13
|
+
ChildContour, ChildNDArray, ChildScalar, ChildTrace, ChildTraceItem
|
|
14
|
+
)
|
|
15
|
+
from .hfilter import HierarchyFilter
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class RTDC_Hierarchy(RTDCBase):
    """Hierarchy dataset whose events are the filtered events of a parent"""

    def __init__(self, hparent, apply_filter=True, *args, **kwargs):
        """Hierarchy dataset (filtered from RTDCBase)

        A few words on hierarchies:
        The idea is that a subclass of RTDCBase can use the filtered
        data of another subclass of RTDCBase and interpret these data
        as unfiltered events. This comes in handy e.g. when the
        percentage of different subpopulations need to be distinguished
        without the noise in the original data.

        Children in hierarchies always update their data according to
        the filtered event data from their parent when `apply_filter`
        is called. This makes it easier to save and load hierarchy
        children with e.g. DCscope and it makes the handling of
        hierarchies more intuitive (when the parent changes,
        the child changes as well).

        Parameters
        ----------
        hparent: instance of RTDCBase
            The hierarchy parent
        apply_filter: bool
            Whether to apply the filter during instantiation;
            If set to `False`, `apply_filter` must be called
            manually.
        *args:
            Arguments for `RTDCBase`
        **kwargs:
            Keyword arguments for `RTDCBase`

        Attributes
        ----------
        hparent: RTDCBase
            Hierarchy parent of this instance
        """
        super(RTDC_Hierarchy, self).__init__(*args, **kwargs)

        # path and title are derived from the parent
        self.path = hparent.path
        self.title = hparent.title + "_child"
        self._logs = None  # lazily-loaded
        self._tables = None  # lazily-loaded

        # feature-data cache (populated in `__getitem__`/`apply_filter`)
        self._events = {}

        self.hparent = hparent

        self.config = self._create_config()  # init config
        self._update_config()  # sets e.g. event count

        if apply_filter:
            # Apply the filter
            # This will also populate all event attributes
            self.apply_filter()

        # cached event count (computed on demand in `__len__`)
        self._length = None

    def __contains__(self, key):
        # a child contains exactly the features its parent contains
        return self.hparent.__contains__(key)

    def __getitem__(self, feat):
        """Return the feature data and cache them in self._events"""
        if feat in self._events:
            data = self._events[feat]
        elif feat in self.hparent:
            if len(self.hparent[feat].shape) > 1:
                # non-scalar feature
                data = ChildNDArray(self, feat)
            else:
                # scalar feature
                data = ChildScalar(self, feat)
            # Cache everything, even the Young's modulus. The user is
            # responsible for calling `rejuvenate` to reset everything.
            self._events[feat] = data
        else:
            raise KeyError(
                f"The dataset {self} does not contain the feature '{feat}'! "
                + "If you are attempting to access an ancillary feature "
                + "(e.g. emodulus), please make sure that the feature "
                + f"data are computed for {self.get_root_parent()} (the "
                + "root parent of this hierarchy child).")
        return data

    def __len__(self):
        # number of events that pass the parent's filter
        if self._length is None:
            self._length = np.sum(self.hparent.filter.all)
        return self._length

    def _assert_filter(self):
        """Make sure the filter exists

        Override from base class that uses :class:`.HierarchyFilter`
        instead of :class:`.Filter`.
        """
        if self._ds_filter is None:
            self._ds_filter = HierarchyFilter(self)

    def _check_parent_filter(self):
        """Reset filter if parent changed

        This will create a new HierarchyFilter for self if the
        parent RTDCBase changed. We do it like this, because it
        would be complicated to track all the changes in
        HierarchyFilter.
        """
        if self.filter.parent_changed:
            # remember manually-excluded root indices across the reset
            manual_pidx = self.filter.retrieve_manual_indices(self)
            self._ds_filter = None  # forces recreation of HierarchyFilter
            self._assert_filter()
            self.filter.apply_manual_indices(self, manual_pidx)

    def _create_config(self):
        """Return a stripped configuration from the parent"""
        # create a new configuration
        cfg = self.hparent.config.copy()
        # Remove previously applied filters
        pops = []
        for key in cfg["filtering"]:
            if (key.endswith(" min") or
                    key.endswith(" max") or
                    key == "polygon filters"):
                pops.append(key)
        # pop in a second pass (cannot remove keys while iterating)
        [cfg["filtering"].pop(key) for key in pops]
        # Add parent information in dictionary
        cfg["filtering"]["hierarchy parent"] = self.hparent.identifier
        return Configuration(cfg=cfg)

    def _update_config(self):
        """Update varying config values from self.hparent"""
        # event count
        self.config["experiment"]["event count"] = np.sum(
            self.hparent.filter.all)
        # calculation
        if "calculation" in self.hparent.config:
            self.config["calculation"].clear()
            self.config["calculation"].update(
                self.hparent.config["calculation"])

    # The following read-only properties delegate to the hierarchy
    # parent; the child exposes the same features/basins as its parent.
    @property
    def basins(self):
        return self.hparent.basins

    @property
    def features(self):
        return self.hparent.features

    @property
    def features_ancillary(self):
        return self.hparent.features_ancillary

    @property
    def features_basin(self):
        return self.hparent.features_basin

    @property
    def features_innate(self):
        return self.hparent.features_innate

    @property
    def features_loaded(self):
        return self.hparent.features_loaded

    @property
    def features_local(self):
        return self.hparent.features_local

    @property
    def features_scalar(self):
        return self.hparent.features_scalar

    @property
    def hash(self):
        """Hash of a hierarchy child; changes if the parent changes"""
        # Do not apply filters here (speed)
        hph = self.hparent.hash
        hpfilt = hashobj(self.hparent.filter.all)
        dhash = hashobj(hph + hpfilt)
        return dhash

    @property
    def logs(self):
        # logs are inherited from the parent (fetched once, then cached)
        if self._logs is None:
            self._logs = self.hparent.logs
        return self._logs

    @logs.setter
    def logs(self, value):
        # to support setting `self.logs = {}` in RTDCBase
        if value:
            raise ValueError(
                "Setting actual logs not supported for RTDC_Hierarchy")

    @property
    def tables(self):
        # tables are inherited from the parent (fetched once, then cached)
        if self._tables is None:
            self._tables = self.hparent.tables
        return self._tables

    @tables.setter
    def tables(self, value):
        # to support setting `self.tables = {}` in RTDCBase
        if value:
            raise ValueError(
                "Setting actual tables not supported for RTDC_Hierarchy")

    def apply_filter(self, *args, **kwargs):
        """Overridden `apply_filter` to perform tasks for hierarchy child"""
        if self._ds_filter is not None:
            # make sure self.filter knows about root manual indices
            # (stored in self.filter._man_root_ids)
            self.filter.retrieve_manual_indices(self)

        # Copy event data from hierarchy parent
        self.hparent.apply_filter(*args, **kwargs)

        # Clear anything that has been cached until now
        self._length = None

        # update event index
        event_count = len(self)
        self._events.clear()
        self._events["index"] = np.arange(1, event_count + 1)
        # set non-scalar column data
        for feat in ["image", "image_bg", "mask"]:
            if feat in self.hparent:
                self._events[feat] = ChildNDArray(self, feat)
        if "contour" in self.hparent:
            self._events["contour"] = ChildContour(self)
        if "trace" in self.hparent:
            trdict = ChildTrace()
            for flname in dfn.FLUOR_TRACES:
                if flname in self.hparent["trace"]:
                    trdict[flname] = ChildTraceItem(self, flname)
            self._events["trace"] = trdict
        # Update configuration
        self._update_config()

        # create a new filter if the parent changed
        self._check_parent_filter()
        super(RTDC_Hierarchy, self).apply_filter(*args, **kwargs)

    def get_root_parent(self):
        """Return the root parent of this dataset"""
        # walk up the hierarchy until the parent is not a child itself
        if isinstance(self.hparent, RTDC_Hierarchy):
            return self.hparent.get_root_parent()
        else:
            return self.hparent

    def rejuvenate(self):
        """Redraw the hierarchy tree, updating config and features

        You should call this function whenever you change something
        in the hierarchy parent(s), be it filters or metadata for computing
        ancillary features.

        .. versionadded:: 0.47.0
            This is just an alias of `apply_filter`, but with a more
            accurate name (not only the filters are applied, but a lot
            of other things might change).
        """
        self.apply_filter()
|
@@ -0,0 +1,146 @@
|
|
|
1
|
+
import collections
|
|
2
|
+
import warnings
|
|
3
|
+
|
|
4
|
+
import numpy as np
|
|
5
|
+
|
|
6
|
+
from ...util import copy_if_needed
|
|
7
|
+
from .mapper import map_indices_child2parent
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class ChildBase:
    """Base class for feature proxies of a hierarchy child.

    Holds a reference to the child dataset and reports its length.
    """

    def __init__(self, child):
        # the hierarchy child this feature belongs to
        self.child = child

    def __len__(self):
        # number of events in the hierarchy child
        return len(self.child)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
class ChildContour(ChildBase):
    """Proxy for the variable-length "contour" feature of a child.

    Note that since the contour has variable lengths, we cannot
    implement an `__array__` method here.
    """

    def __init__(self, child):
        super().__init__(child)
        # second axis has no fixed length (variable-length contours)
        self.shape = (len(child), np.nan, 2)

    def __getitem__(self, idx):
        parent = self.child.hparent
        parent_indices = map_indices_child2parent(child=self.child,
                                                  child_indices=idx)
        return parent["contour"][parent_indices]

    @property
    def dtype(self):
        # same dtype as the parent's contour data
        return self.child.hparent["contour"].dtype
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class ChildNDArray(ChildBase):
    """Proxy for a non-scalar (multi-dimensional) feature of a child"""

    def __init__(self, child, feat):
        super().__init__(child)
        # name of the non-scalar feature (e.g. "image")
        self.feat = feat

    def __array__(self, dtype=None, copy=copy_if_needed, *args, **kwargs):
        warnings.warn("Please avoid calling the `__array__` method of the "
                      "`ChildNDArray`. It may consume a lot of memory. "
                      "Consider using a generator instead.",
                      UserWarning)
        return np.asarray(self[:], dtype=dtype, *args, **kwargs)

    def __getitem__(self, idx):
        parent = self.child.hparent
        parent_indices = map_indices_child2parent(child=self.child,
                                                  child_indices=idx)
        return parent[self.feat][parent_indices]

    @property
    def dtype(self):
        return self.child.hparent[self.feat].dtype

    @property
    def shape(self):
        # (child event count,) + per-event shape from the parent
        parent = self.child.hparent
        return (len(self),) + tuple(parent[self.feat][0].shape)
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
class ChildScalar(np.lib.mixins.NDArrayOperatorsMixin):
    """Proxy for a scalar feature of a hierarchy child.

    Lazily extracts the parent's filtered feature values and caches
    them, along with the results of min/max/mean reductions.
    """

    def __init__(self, child, feat):
        # the hierarchy child this feature belongs to
        self.child = child
        # name of the scalar feature
        self.feat = feat
        # lazily-populated filtered feature array
        self._array = None
        # ufunc metadata attribute cache
        self._ufunc_attrs = {}
        self.ndim = 1  # matplotlib might expect this from an array

    def __array__(self, dtype=None, copy=copy_if_needed, *args, **kwargs):
        if self._array is None:
            parent = self.child.hparent
            # keep only the events passing the parent's filter
            self._array = parent[self.feat][parent.filter.all]
        return np.array(self._array, dtype=dtype, copy=copy, *args, **kwargs)

    def __getitem__(self, idx):
        return self.__array__()[idx]

    def __len__(self):
        return len(self.child)

    def _fetch_ufunc_attr(self, uname, ufunc):
        """A wrapper for calling functions on the scalar feature data

        If the ufunc is computed, it is cached permanently in
        self._ufunc_attrs
        """
        if self._ufunc_attrs.get(uname) is None:
            self._ufunc_attrs[uname] = ufunc(self.__array__())
        return self._ufunc_attrs[uname]

    def max(self, *args, **kwargs):
        return self._fetch_ufunc_attr("max", np.nanmax)

    def mean(self, *args, **kwargs):
        return self._fetch_ufunc_attr("mean", np.nanmean)

    def min(self, *args, **kwargs):
        return self._fetch_ufunc_attr("min", np.nanmin)

    @property
    def shape(self):
        return (len(self),)
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class ChildTrace(collections.UserDict):
    """Dict of fluorescence trace channels for a hierarchy child"""

    @property
    def shape(self):
        # (number of channels,) + shape of one channel (#117)
        first_key = min(self.keys())
        return (len(self),) + tuple(self[first_key].shape)
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class ChildTraceItem(ChildBase):
    """Proxy for a single fluorescence trace channel of a child"""

    def __init__(self, child, flname):
        super().__init__(child)
        # name of the fluorescence trace (e.g. "fl1_raw")
        self.flname = flname

    def __array__(self, dtype=None, copy=copy_if_needed, *args, **kwargs):
        warnings.warn("Please avoid calling the `__array__` method of the "
                      "`ChildTraceItem`. It may consume a lot of memory. "
                      "Consider using a generator instead.",
                      UserWarning)
        return np.asarray(self[:], dtype=dtype, *args, **kwargs)

    def __getitem__(self, idx):
        parent = self.child.hparent
        parent_indices = map_indices_child2parent(child=self.child,
                                                  child_indices=idx)
        return parent["trace"][self.flname][parent_indices]

    @property
    def dtype(self):
        return self.child.hparent["trace"][self.flname].dtype

    @property
    def shape(self):
        # (child event count, samples per trace)
        parent = self.child.hparent
        return len(self), parent["trace"][self.flname].shape[1]
|
|
@@ -0,0 +1,140 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
|
|
3
|
+
from ...util import hashobj
|
|
4
|
+
|
|
5
|
+
from ..filter import Filter
|
|
6
|
+
|
|
7
|
+
from .mapper import map_indices_root2child, map_indices_child2root
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class HierarchyFilterError(Exception):
    """Used for unexpected filtering operations

    Derives from :class:`Exception` (not :class:`BaseException`, as a
    previous version did): PEP 8 requires user-defined exceptions to
    subclass `Exception` so that generic ``except Exception`` handlers
    at application boundaries can catch it.
    """
    pass
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class HierarchyFilter(Filter):
    def __init__(self, rtdc_ds):
        """A filtering class for RTDC_Hierarchy

        This subclass handles manual filters for hierarchy children.
        The general problem with hierarchy children is that their data
        changes when the hierarchy parent changes. As hierarchy
        children may also have hierarchy children, dealing with
        manual filters (`Filter.manual`) is not trivial. Here,
        the manual filters are translated into event indices of the
        root parent (the highest member of the hierarchy, which is
        `RTDC_Hierarchy.hparent` if there is only one child).
        This makes it possible to keep track of the manually excluded
        events even if

        - the parent changes its filters,
        - the parent is a hierarchy child as well, or
        - the excluded event is filtered out in the parent.
        """
        # manually-excluded event indices, expressed in the root parent
        self._man_root_ids = []
        super(HierarchyFilter, self).__init__(rtdc_ds)
        self._parent_rtdc_ds = None
        self._parent_hash = None
        self.update_parent(rtdc_ds.hparent)

    @property
    def parent_changed(self):
        """Whether the parent's filter changed since `update_parent`"""
        return hashobj(self._parent_rtdc_ds.filter.all) != self._parent_hash

    def apply_manual_indices(self, rtdc_ds, manual_indices):
        """Write to `self.manual`

        Write `manual_indices` to the boolean array `self.manual`
        and also store the indices as `self._man_root_ids`.

        Notes
        -----
        If `self.parent_changed` is `True`, i.e. the parent applied
        a filter and the child did not yet hear about this, then
        `HierarchyFilterError` is raised. This is important, because
        the size of the current filter would not match the size of
        the filtered events of the parent and thus index-mapping
        would not work.
        """
        if self.parent_changed:
            msg = "Cannot apply filter, because parent changed: " \
                  + "dataset {}. ".format(rtdc_ds) \
                  + "Run `RTDC_Hierarchy.apply_filter()` first!"
            raise HierarchyFilterError(msg)
        else:
            self._man_root_ids = list(manual_indices)
            # translate root indices to this child's index space
            cidx = map_indices_root2child(child=rtdc_ds,
                                          root_indices=manual_indices)
            if len(cidx):
                self.manual[cidx] = False

    def reset(self):
        """Reset the filter and forget all stored root indices"""
        super(HierarchyFilter, self).reset()
        self._man_root_ids.clear()

    def retrieve_manual_indices(self, rtdc_ds):
        """Read from self.manual

        Read from the boolean array `self.manual`, index all
        occurrences of `False` and find the corresponding indices
        in the root hierarchy parent, return those and store them
        in `self._man_root_ids` as well.

        Notes
        -----
        This method also retrieves hidden indices, i.e. events
        that are not part of the current hierarchy child but
        which have been manually excluded before and are now
        hidden because a hierarchy parent filtered them out.

        If `self.parent_changed` is `True`, i.e. the parent applied
        a filter and the child did not yet hear about this, then
        nothing is computed and `self._man_root_ids` is returned
        as-is. This is important, because the size of the current
        filter would not match the size of the filtered events of
        the parent and thus index-mapping would not work.
        """
        if self.parent_changed:
            # ignore
            pass
        elif np.all(self.manual):
            # Do not do anything and remember the events we manually
            # excluded in case the parent reinserts them.
            pass
        else:
            # indices from boolean array
            pbool = map_indices_child2root(
                child=rtdc_ds,
                child_indices=np.where(~self.manual)[0]).tolist()
            # retrieve all indices that are currently not visible
            # previous indices
            pold = self._man_root_ids
            # all indices previously selected either via
            # - self.manual or
            # - self.apply_manual_indices
            pall = sorted(list(set(pbool + pold)))
            # visible indices (only available child indices are returned)
            pvis_c = map_indices_root2child(child=rtdc_ds,
                                            root_indices=pall).tolist()
            # map visible child indices back to root indices
            pvis_p = map_indices_child2root(child=rtdc_ds,
                                            child_indices=pvis_c).tolist()
            # hidden indices
            phid = list(set(pall) - set(pvis_p))
            # Why not set `all_idx` to `pall`:
            # - pbool is considered to be correct
            # - pold contains hidden indices, but also might contain
            # excess indices from before, i.e. if self.apply_manual_indices
            # is called, self.manual is also updated. If however,
            # self.manual is updated, self._man_root_ids are not updated.
            # Thus, we trust pbool (self.manual) and only use pold
            # (self._man_root_ids) to determine hidden indices.
            all_idx = list(set(pbool + phid))
            self._man_root_ids = sorted(all_idx)
        return self._man_root_ids

    def update_parent(self, parent_rtdc_ds):
        """Set the parent dataset and snapshot its filter hash"""
        # hold reference to rtdc_ds parent
        # (not to its filter, because that is reinstantiated)
        self._parent_rtdc_ds = parent_rtdc_ds
        self._parent_hash = hashobj(self._parent_rtdc_ds.filter.all)
|