dcnum 0.23.1-py3-none-any.whl → 0.25.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of dcnum might be problematic.
- dcnum/_version.py +2 -2
- dcnum/feat/event_extractor_manager_thread.py +6 -5
- dcnum/feat/feat_background/base.py +24 -9
- dcnum/feat/feat_background/bg_sparse_median.py +56 -30
- dcnum/logic/ctrl.py +118 -43
- dcnum/logic/job.py +22 -0
- dcnum/meta/ppid.py +4 -3
- dcnum/read/__init__.py +1 -0
- dcnum/read/cache.py +4 -3
- dcnum/read/detect_flicker.py +44 -0
- dcnum/read/hdf5_data.py +138 -70
- dcnum/read/mapped.py +15 -2
- dcnum/segm/segm_torch/__init__.py +8 -4
- dcnum/segm/segm_torch/segm_torch_mpo.py +4 -1
- dcnum/write/__init__.py +1 -1
- dcnum/write/queue_collector_thread.py +7 -14
- dcnum/write/writer.py +149 -36
- {dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/METADATA +2 -2
- {dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/RECORD +22 -21
- {dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/WHEEL +1 -1
- {dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/LICENSE +0 -0
- {dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/top_level.txt +0 -0
dcnum/write/writer.py
CHANGED
@@ -1,13 +1,14 @@
 import hashlib
 import json
 import pathlib
-from typing import Dict, List
+from typing import Dict, List, Tuple
 import warnings
 
 import h5py
 import hdf5plugin
 import numpy as np
 
+from ..read import HDF5Data
 from .._version import version
 
 
@@ -16,6 +17,11 @@ class CreatingFileWithoutBasinWarning(UserWarning):
     pass
 
 
+class IgnoringBasinTypeWarning(UserWarning):
+    """Issued when a specific basin type is ignored"""
+    pass
+
+
 class HDF5Writer:
     def __init__(self,
                  # TODO: make this a mandatory argument when `path` is
@@ -48,7 +54,13 @@ class HDF5Writer:
             self.h5 = obj
             self.h5_owned = False
         else:
-            self.h5 = h5py.File(obj,
+            self.h5 = h5py.File(obj,
+                                mode=mode,
+                                libver="latest",
+                                # Set chunk cache size to 3 MiB for each
+                                # dataset to allow partial writes.
+                                rdcc_nbytes=3145728,
+                                )
             self.h5_owned = True
         self.events = self.h5.require_group("events")
         ds_kwds = set_default_filter_kwargs(ds_kwds)
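For context, h5py exposes the per-dataset chunk cache through the `rdcc_nbytes` argument of `h5py.File`, which is what the hunk above sets to 3 MiB. A minimal standalone sketch (the file name is a placeholder, not taken from the diff):

import h5py

# Open a file with a 3 MiB raw-data chunk cache per dataset, mirroring
# rdcc_nbytes=3145728 (= 3 * 1024**2) from the hunk above.
with h5py.File("example.h5", "a", libver="latest",
               rdcc_nbytes=3 * 1024**2) as h5:
    h5.require_group("events")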
@@ -84,14 +96,42 @@ class HDF5Writer:
         chunk_size_int = max(10, int(np.floor(chunk_size)))
         return tuple([chunk_size_int] + list(item_shape))
 
-    def require_feature(self,
-
-
+    def require_feature(self,
+                        feat: str,
+                        item_shape: Tuple[int],
+                        feat_dtype: np.dtype,
+                        ds_kwds: Dict = None,
+                        group_name: str = "events"):
+        """Create a new feature in the "events" group
+
+        Parameters
+        ----------
+        feat: str
+            name of the feature
+        item_shape: Tuple[int]
+            shape for one event of this feature, e.g. for a scalar
+            event, the shape would be `(1,)` and for an image, the
+            shape could be `(80, 300)`.
+        feat_dtype: np.dtype
+            dtype of the feature
+        ds_kwds: Dict
+            HDF5 Dataset keyword arguments (e.g. compression, fletcher32)
+        group_name: str
+            name of the HDF5 group where the feature should be written to;
+            defaults to the "events" group, but a different group can be
+            specified for storing e.g. internal basin features.
+        """
+        if group_name == "events":
+            egroup = self.events
+        else:
+            egroup = self.h5.require_group(group_name)
+
+        if feat not in egroup:
             if ds_kwds is None:
                 ds_kwds = {}
             for key in self.ds_kwds:
                 ds_kwds.setdefault(key, self.ds_kwds[key])
-            dset =
+            dset = egroup.create_dataset(
                 feat,
                 shape=tuple([0] + list(item_shape)),
                 dtype=feat_dtype,
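A hedged usage sketch of the new `require_feature` signature; the output path "out.h5" and the feature name "userdef1" are invented for illustration, and `HDF5Writer` is imported directly from the module shown in this diff:

import h5py
import numpy as np
from dcnum.write.writer import HDF5Writer

with h5py.File("out.h5", "a") as h5:
    hw = HDF5Writer(h5)
    # Create (or reopen) a scalar feature inside a non-default group.
    dset, offset = hw.require_feature(feat="userdef1",
                                      item_shape=(1,),
                                      feat_dtype=np.dtype(np.float64),
                                      group_name="basin_events")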
@@ -100,22 +140,23 @@ class HDF5Writer:
                     feat_dtype=feat_dtype),
                 **ds_kwds)
             if len(item_shape) == 2:
-                dset.attrs.create('CLASS', np.
-                dset.attrs.create('IMAGE_VERSION', np.
+                dset.attrs.create('CLASS', np.bytes_('IMAGE'))
+                dset.attrs.create('IMAGE_VERSION', np.bytes_('1.2'))
                 dset.attrs.create('IMAGE_SUBCLASS',
-                                  np.
+                                  np.bytes_('IMAGE_GRAYSCALE'))
             offset = 0
         else:
-            dset =
+            dset = egroup[feat]
             offset = dset.shape[0]
         return dset, offset
 
     def store_basin(self,
                     name: str,
-                    paths: List[str | pathlib.Path],
+                    paths: List[str | pathlib.Path] | None = None,
                     features: List[str] = None,
                     description: str | None = None,
-                    mapping: np.ndarray = None
+                    mapping: np.ndarray = None,
+                    internal_data: Dict | None = None,
                     ):
         """Write an HDF5-based file basin
 
@@ -123,8 +164,9 @@ class HDF5Writer:
         ----------
         name: str
             basin name; Names do not have to be unique.
-        paths: list of str or pathlib.Path
-            location(s) of the basin
+        paths: list of str or pathlib.Path or None
+            location(s) of the basin; must be None when storing internal
+            data, a list of paths otherwise
         features: list of str
             list of features provided by `paths`
         description: str
@@ -132,14 +174,39 @@ class HDF5Writer:
         mapping: 1D array
             integer array with indices that map the basin dataset
             to this dataset
+        internal_data: dict of ndarrays
+            internal basin data to store; If this is set, then `features`
+            and `paths` must be set to `None`.
         """
         bdat = {
             "description": description,
-            "format": "hdf5",
             "name": name,
-            "paths": [str(pp) for pp in paths],
-            "type": "file",
         }
+
+        if internal_data:
+            if features is not None:
+                raise ValueError("`features` must be set to None when storing "
+                                 "internal basin features")
+            if paths is not None:
+                raise ValueError("`paths` must be set to None when storing "
+                                 "internal basin features")
+            # store the internal basin information
+            for feat in internal_data:
+                if feat in self.h5.require_group("basin_events"):
+                    raise ValueError(f"Feature '{feat}' is already defined "
+                                     f"as an internal basin feature")
+                self.store_feature_chunk(feat=feat,
+                                         data=internal_data[feat],
+                                         group_name="basin_events")
+            features = sorted(internal_data.keys())
+            bdat["format"] = "h5dataset"
+            bdat["paths"] = ["basin_events"]
+            bdat["type"] = "internal"
+        else:
+            bdat["format"] = "hdf5"
+            bdat["paths"] = [str(pp) for pp in paths]
+            bdat["type"] = "file"
+
         # Explicit features stored in basin file
         if features is not None and len(features):
             bdat["features"] = features
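A sketch of how an internal basin might be stored through the new `internal_data` argument, assuming an `HDF5Writer` wrapped around an open file as above; the file name, basin name, and arrays are placeholders:

import h5py
import numpy as np
from dcnum.write.writer import HDF5Writer

with h5py.File("out.h5", "a") as h5:
    hw = HDF5Writer(h5)
    # `paths` and `features` stay None; the data end up in "basin_events".
    hw.store_basin(name="example basin",
                   description="internal basin example",
                   mapping=np.arange(100),
                   internal_data={"userdef1": np.linspace(0, 1, 100)})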
@@ -189,7 +256,7 @@ class HDF5Writer:
             chunks=True,
             **self.ds_kwds)
 
-    def store_feature_chunk(self, feat, data):
+    def store_feature_chunk(self, feat, data, group_name="events"):
         """Store feature data
 
         The "chunk" implies that always chunks of data are stored,
@@ -199,7 +266,8 @@ class HDF5Writer:
             data = 255 * np.array(data, dtype=np.uint8)
         ds, offset = self.require_feature(feat=feat,
                                           item_shape=data.shape[1:],
-                                          feat_dtype=data.dtype
+                                          feat_dtype=data.dtype,
+                                          group_name=group_name)
         dsize = data.shape[0]
         ds.resize(offset + dsize, axis=0)
         ds[offset:offset + dsize] = data
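The `group_name` pass-through can also be exercised directly; a hedged sketch with placeholder file, feature name, and data:

import h5py
import numpy as np
from dcnum.write.writer import HDF5Writer

with h5py.File("out.h5", "a") as h5:
    hw = HDF5Writer(h5)
    # Append a chunk of 2D image data to the "basin_events" group.
    hw.store_feature_chunk(feat="image_bg",
                           data=np.zeros((50, 80, 300), dtype=np.uint8),
                           group_name="basin_events")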
@@ -285,14 +353,17 @@ def create_with_basins(
         # copy metadata
         with h5py.File(prep, libver="latest") as h5:
             copy_metadata(h5_src=h5, h5_dst=hw.h5)
+            copy_basins(h5_src=h5, h5_dst=hw.h5)
             # extract features
             features = sorted(h5["events"].keys())
+            features = [f for f in features if
+                        not f.startswith("basinmap")]
             name = prep.name
     else:
         features = None
         name = bps[0]
 
-    #
+    # Write the basin data
     hw.store_basin(name=name,
                    paths=bps,
                    features=features,
@@ -300,6 +371,44 @@ def create_with_basins(
                    )
 
 
+def copy_basins(h5_src: h5py.File,
+                h5_dst: h5py.File,
+                internal_basins: bool = True
+                ):
+    """Reassemble basin data in the output file
+
+    This does not just copy the datasets defined in the "basins"
+    group, but it also loads the "basinmap?" features and stores
+    them as new "basinmap?" features in the output file.
+    """
+    basins = HDF5Data.extract_basin_dicts(h5_src, check=False)
+    hw = HDF5Writer(h5_dst)
+    for bn_dict in basins:
+        if bn_dict["type"] == "internal" and internal_basins:
+            internal_data = {}
+            for feat in bn_dict["features"]:
+                internal_data[feat] = h5_src["basin_events"][feat]
+            hw.store_basin(name=bn_dict["name"],
+                           description=bn_dict["description"],
+                           mapping=h5_src["events"][bn_dict["mapping"]][:],
+                           internal_data=internal_data,
+                           )
+        elif bn_dict["type"] == "file":
+            if bn_dict.get("mapping") is not None:
+                mapping = h5_src["events"][bn_dict["mapping"]][:]
+            else:
+                mapping = None
+            hw.store_basin(name=bn_dict["name"],
+                           description=bn_dict["description"],
+                           paths=bn_dict["paths"],
+                           features=bn_dict["features"],
+                           mapping=mapping,
+                           )
+        else:
+            warnings.warn(f"Ignored basin of type '{bn_dict['type']}'",
+                          IgnoringBasinTypeWarning)
+
+
 def copy_features(h5_src: h5py.File,
                   h5_dst: h5py.File,
                   features: List[str],
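An assumed usage sketch of the new `copy_basins` helper; the input and output file names are placeholders:

import h5py
from dcnum.write.writer import copy_basins

# Transfer basin definitions (including internal basins and their
# "basinmap?" features) from an existing file into a new output file.
with h5py.File("input.rtdc") as h5_src, \
        h5py.File("output.rtdc", "a") as h5_dst:
    copy_basins(h5_src=h5_src, h5_dst=h5_dst)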
@@ -323,8 +432,6 @@ def copy_features(h5_src: h5py.File,
     """
     ei = h5_src["events"]
     eo = h5_dst.require_group("events")
-    # This is the size of the output dataset
-    size = h5_dst.attrs["experiment:event count"]
     hw = HDF5Writer(h5_dst)
     for feat in features:
         if feat in eo:
@@ -341,26 +448,34 @@ def copy_features(h5_src: h5py.File,
                 dst_name=feat.encode(),
             )
         else:
-            #
-            #
+            # We have to perform mapping.
+            # Since h5py is very slow at indexing with arrays,
+            # we instead read the data in chunks from the input file,
+            # and perform the mapping afterward using the numpy arrays.
             dsi = ei[feat]
             chunk_size = hw.get_best_nd_chunks(dsi[0].shape, dsi.dtype)[0]
+            size_in = dsi.shape[0]
             start = 0
-            while start <
-
-
-
-
-
+            while start < size_in:
+                # Get a big chunk of data
+                big_chunk = 10 * chunk_size
+                stop = start + big_chunk
+                data_in = dsi[start:stop]
+                # Determine the indices that we need from that chunk.
+                mapping_idx = (start <= mapping) * (mapping < stop)
+                mapping_chunk = mapping[mapping_idx] - start
+                data = data_in[mapping_chunk]
+                # Note that HDF5 does its own caching, properly handling
+                # partial chunk writes.
                 hw.store_feature_chunk(feat, data)
                 # increment start
-                start
+                start = stop
 
 
 def copy_metadata(h5_src: h5py.File,
-                  h5_dst: h5py.File
-
-    """Copy attributes, tables,
+                  h5_dst: h5py.File
+                  ):
+    """Copy attributes, tables, and logs from one H5File to another
 
     Notes
     -----
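The chunked mapping introduced in `copy_features` above can be illustrated with a plain-numpy sketch (no HDF5 involved); the arrays and block size are made up for illustration:

import numpy as np

src = np.arange(1000)               # stand-in for the source dataset
mapping = np.array([3, 250, 999])   # sorted indices to extract
block = 100                         # stand-in for 10 * chunk_size

chunks = []
for start in range(0, src.shape[0], block):
    stop = start + block
    data_in = src[start:stop]
    # Select the mapping entries that fall into this block and shift them
    # to block-local indices before slicing the in-memory array.
    sel = (start <= mapping) & (mapping < stop)
    chunks.append(data_in[mapping[sel] - start])

result = np.concatenate(chunks)
assert np.array_equal(result, src[mapping])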
@@ -374,8 +489,6 @@ def copy_metadata(h5_src: h5py.File,
     for kk in src_attrs:
         h5_dst.attrs.setdefault(kk, src_attrs[kk])
     copy_data = ["logs", "tables"]
-    if copy_basins:
-        copy_data.append("basins")
     # copy other metadata
     for topic in copy_data:
         if topic in h5_src:
{dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dcnum
-Version: 0.
+Version: 0.25.1
 Summary: numerics toolbox for imaging deformability cytometry
 Author: Maximilian Schlögel, Paul Müller, Raghava Alajangi
 Maintainer-email: Paul Müller <dev@craban.de>
@@ -26,7 +26,7 @@ Requires-Dist: opencv-python-headless
 Requires-Dist: scikit-image
 Requires-Dist: scipy >=1.8.0
 Provides-Extra: torch
-Requires-Dist: torch >=2.
+Requires-Dist: torch >=2.2 ; extra == 'torch'
 
 |dcnum|
 =======
{dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/RECORD
CHANGED
@@ -1,14 +1,14 @@
 dcnum/__init__.py,sha256=hcawIKS7utYiOyVhOAX9t7K3xYzP1b9862VV0b6qSrQ,74
-dcnum/_version.py,sha256=
+dcnum/_version.py,sha256=uQMGTxwBAv62qqmqUtTzCRwELoTJrxD49IfmR-dUSn8,413
 dcnum/feat/__init__.py,sha256=jUJYWTD3VIoDNKrmryXbjHb1rGwYtK4b7VPWihYgUoo,325
-dcnum/feat/event_extractor_manager_thread.py,sha256=
+dcnum/feat/event_extractor_manager_thread.py,sha256=FAxSyRfaNAuBWNplxHngp5h-44s0qIP24XX_oETdfMk,7836
 dcnum/feat/gate.py,sha256=Yhxq80JoRMmQzBxl35C8NT91c9QcmQa-EIKLuxK6WvE,7221
 dcnum/feat/queue_event_extractor.py,sha256=0ncTQleT1sfc98zYkFuZWxU-akecfTrW6-OOU3z-d8o,15698
 dcnum/feat/feat_background/__init__.py,sha256=OTmMuazHNaSrZb2XW4cnJ6PlgJLbKrPbaidpEixYa0A,341
-dcnum/feat/feat_background/base.py,sha256=
+dcnum/feat/feat_background/base.py,sha256=bQBPvztrku-8YSVk8YBUUNh7MaYcnztgyD2-dQHxpzw,8674
 dcnum/feat/feat_background/bg_copy.py,sha256=PK8x4_Uph-_A6uszZC5uhe1gD1dSRdHnDMEsN0HSGHA,1034
 dcnum/feat/feat_background/bg_roll_median.py,sha256=EyjstMDXFBYuJB1lN6g4Uw7tPm434X3hXQxKSqvcoJ4,13175
-dcnum/feat/feat_background/bg_sparse_median.py,sha256=
+dcnum/feat/feat_background/bg_sparse_median.py,sha256=wt7lvPSiZkdaerRErgezd_YDVxHA2kAqO1LMX9PuHJk,22053
 dcnum/feat/feat_brightness/__init__.py,sha256=o6AebVlmydwNgVF5kW6ITqJyFreoKrU3Ki_3EC8If-s,155
 dcnum/feat/feat_brightness/bright_all.py,sha256=vf8xaYBdKD24hHUXdkI0_S7nbr7m49KW6gvuWvbHDVg,4545
 dcnum/feat/feat_brightness/common.py,sha256=JX49EszYDmnvoOKXFVV1CalEIWRmOuY5EryNbqGbdac,156
@@ -20,36 +20,37 @@ dcnum/feat/feat_texture/__init__.py,sha256=6StM9S540UVtdFFR3bHa7nfCTomeVdoo7Uy9C
 dcnum/feat/feat_texture/common.py,sha256=COXHpXS-7DMouGu3WF83I76L02Sr7P9re4lxajh6g0E,439
 dcnum/feat/feat_texture/tex_all.py,sha256=_5H3sXYRN0Uq2eUHn3XUyEHkU_tncEqbqJTC-HZcnGY,5198
 dcnum/logic/__init__.py,sha256=7J3GrwJInNQbrLk61HRIV7X7p69TAIbMYpR34hh6u14,177
-dcnum/logic/ctrl.py,sha256=
-dcnum/logic/job.py,sha256=
+dcnum/logic/ctrl.py,sha256=FwzrCou4YBPBaaRRTqJkPt6jenFev9JfnxxjUMS0B-w,39042
+dcnum/logic/job.py,sha256=9BN2WjYqjjJuLnfNZAtQ2Nn47Glo2jVrivDodGJoqlQ,7713
 dcnum/logic/json_encoder.py,sha256=cxMnqisbKEVf-rVcw6rK2BBAb6iz_hKFaGl81kK36lQ,571
 dcnum/meta/__init__.py,sha256=AVqRgyKXO1orKnE305h88IBvoZ1oz6X11HN1WP5nGvg,60
 dcnum/meta/paths.py,sha256=J_ikeHzd7gEeRgAKjuayz3x6q4h1fOiDadM-ZxhAGm4,1053
-dcnum/meta/ppid.py,sha256=
-dcnum/read/__init__.py,sha256=
-dcnum/read/cache.py,sha256=
+dcnum/meta/ppid.py,sha256=RnDkJSdV1kDznAsOhQN5WI7uC9UwSMCjyADP7yWNvkM,8478
+dcnum/read/__init__.py,sha256=LYHyZHgiNTpjV5oEcty-7Kh5topLpHT_cFlNl-QX8gg,262
+dcnum/read/cache.py,sha256=LNA5nnDyrw8Nj07E7XfG2GcHEoWm6vA6Qo_8N-n-sGw,6492
 dcnum/read/const.py,sha256=GG9iyXDtEldvJYOBnhZjlimzIeBMAt4bSr2-xn2gzzc,464
-dcnum/read/
-dcnum/read/
+dcnum/read/detect_flicker.py,sha256=CeUyxI6LaX_lCNvBPm_yzsiWmiNcZYqbNZCtvKPdkcU,1827
+dcnum/read/hdf5_data.py,sha256=JVk9YWw1rPgTPxaMZsw2ehk4FJq9UqhmB1SW7yhPw50,25867
+dcnum/read/mapped.py,sha256=zU2fYdZfLNHn0rKHxDzBhNFMu4--WWa8nSeE2likyZA,3637
 dcnum/segm/__init__.py,sha256=9cLEAd3JWE8IGqDHV-eSDIYOGBfOepd8OcebtNs8Omk,309
 dcnum/segm/segm_thresh.py,sha256=iVhvIhzO0Gw0t3rXOgH71rOI0CNjJJQq4Gg6BulUhK8,948
 dcnum/segm/segmenter.py,sha256=FWLFDBR-x_85ku2rObA2F-QBrM4IUaUL-YHChLagVvM,14902
 dcnum/segm/segmenter_manager_thread.py,sha256=frM0sMxC7f7TQiFjmpRxuwG2kUBFpW1inV8dtpADHiI,5924
 dcnum/segm/segmenter_mpo.py,sha256=o6mQlITHgEWvQt9v6oCWwAcZUvxE7MOeLE9DFManzpY,13757
 dcnum/segm/segmenter_sto.py,sha256=e6MtN_RWusA0wTExV-FLGpDXNJs1CbSyXcSdWUPBMvM,3959
-dcnum/segm/segm_torch/__init__.py,sha256=
+dcnum/segm/segm_torch/__init__.py,sha256=DtUqJTbj7ybrTbXlwHq1Y4SCzi22rMW9Cus6wX-iU-A,822
 dcnum/segm/segm_torch/segm_torch_base.py,sha256=G9AhVyD6LkAmk0tkbYnJUSpvcj3_HYf0uqfILZQsyus,4479
-dcnum/segm/segm_torch/segm_torch_mpo.py,sha256=
+dcnum/segm/segm_torch/segm_torch_mpo.py,sha256=GOva6o-6_SppxWD4BeBB3ap1TR-6rIYHavtfIstaYvc,2643
 dcnum/segm/segm_torch/segm_torch_sto.py,sha256=PTOJrP_FkaxZZul8lM4VA2HL3KyxrheDDWWdJbmJdiw,3393
 dcnum/segm/segm_torch/torch_model.py,sha256=5aL6SwSvg1N2gATEGBhP3aA4WTHlvGzQVYuizmh0LrU,3187
 dcnum/segm/segm_torch/torch_postproc.py,sha256=ctirQTmsZnuZGIxkwFWN9arRneHRYJUxaJ_ZyCgjByM,3311
 dcnum/segm/segm_torch/torch_preproc.py,sha256=kjabu76paw23kO7RP7Ik6IY60Kk1VBAHKBAedflA0aQ,4002
-dcnum/write/__init__.py,sha256=
+dcnum/write/__init__.py,sha256=sK79IlvCFIqf2oFABVeyYedMnHOsEIQpxAauEeNO-Tw,273
 dcnum/write/deque_writer_thread.py,sha256=ao7F1yrVKyufgC4rC0Y2_Vt7snuT6KpI7W2qVxcjdhk,1994
-dcnum/write/queue_collector_thread.py,sha256=
-dcnum/write/writer.py,sha256=
-dcnum-0.
-dcnum-0.
-dcnum-0.
-dcnum-0.
-dcnum-0.
+dcnum/write/queue_collector_thread.py,sha256=KwwNIDFEF2DU83woKES5K05MxxOhDxPMZLLeyPugfDo,11542
+dcnum/write/writer.py,sha256=oHlq4bDHQxb33-3Fw1xnzJwACecLyH-6koGK8SN0cSk,20528
+dcnum-0.25.1.dist-info/LICENSE,sha256=YRChA1C8A2E-amJbudwMcbTCZy_HzmeY0hMIvduh1MM,1089
+dcnum-0.25.1.dist-info/METADATA,sha256=XtFOojvzsJHhAz_uxJW0vxg3mg3SYPB39tlKgLPGnbY,2280
+dcnum-0.25.1.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
+dcnum-0.25.1.dist-info/top_level.txt,sha256=Hmh38rgG_MFTVDpUDGuO2HWTSq80P585Het4COQzFTg,6
+dcnum-0.25.1.dist-info/RECORD,,
{dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/LICENSE
File without changes

{dcnum-0.23.1.dist-info → dcnum-0.25.1.dist-info}/top_level.txt
File without changes