dcnum-0.23.1-py3-none-any.whl → dcnum-0.23.3-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- dcnum/_version.py +2 -2
- dcnum/feat/event_extractor_manager_thread.py +6 -5
- dcnum/logic/ctrl.py +28 -3
- dcnum/logic/job.py +22 -0
- dcnum/read/hdf5_data.py +3 -1
- dcnum/segm/segm_torch/__init__.py +8 -4
- dcnum/write/writer.py +24 -12
- {dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/METADATA +2 -2
- {dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/RECORD +12 -12
- {dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/LICENSE +0 -0
- {dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/WHEEL +0 -0
- {dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/top_level.txt +0 -0
dcnum/_version.py CHANGED
(version string bumped: 0.23.1 → 0.23.3)

dcnum/feat/event_extractor_manager_thread.py CHANGED

@@ -96,12 +96,13 @@ class EventExtractorManagerThread(threading.Thread):
             # If the writer_dq starts filling up, then this could lead to
             # an oom-kill signal. Stall for the writer to prevent this.
             if (ldq := len(self.writer_dq)) > 1000:
-
-
-
-
+                stalled_sec = 0.
+                for ii in range(60):
+                    if len(self.writer_dq) > 200:
+                        time.sleep(.5)
+                        stalled_sec += .5
                 self.logger.warning(
-                    f"Stalled {
+                    f"Stalled {stalled_sec:.1f}s due to slow writer "
                     f"({ldq} chunks queued)")

             unavailable_slots = 0
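The stall logic above is a simple backpressure pattern: the producing thread sleeps while the writer deque is overfull, instead of letting unwritten chunks accumulate until the operating system oom-kills the process. A standalone sketch of the same idea, assuming a producer sharing a collections.deque with a slower writer thread (all names here are illustrative, not dcnum's API):

    import collections
    import time

    writer_dq = collections.deque()  # filled here, drained by a writer thread

    def enqueue_chunk(chunk):
        if len(writer_dq) > 1000:          # writer has fallen far behind
            stalled_sec = 0.
            for _ in range(60):            # stall for at most 30 s
                if len(writer_dq) <= 200:  # queue has drained enough
                    break
                time.sleep(.5)
                stalled_sec += .5
            print(f"Stalled {stalled_sec:.1f}s due to slow writer")
        writer_dq.append(chunk)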
dcnum/logic/ctrl.py CHANGED

@@ -1,6 +1,7 @@
 import collections
 import datetime
 import hashlib
+import importlib
 import json
 import logging
 from logging.handlers import QueueListener

@@ -33,6 +34,7 @@ from ..write import (
 from .job import DCNumPipelineJob
 from .json_encoder import ExtendedJSONEncoder

+
 # Force using "spawn" method for multiprocessing, because we are using
 # queues and threads and would end up with race conditions otherwise.
 mp_spawn = mp.get_context("spawn")

@@ -430,6 +432,16 @@ class DCNumJobRunner(threading.Thread):
                 "build": ", ".join(platform.python_build()),
                 "implementation":
                     platform.python_implementation(),
+                "libraries": get_library_versions_dict([
+                    "cv2",
+                    "h5py",
+                    "mahotas",
+                    "numba",
+                    "numpy",
+                    "scipy",
+                    "skimage",
+                    "torch",
+                ]),
                 "version": platform.python_version(),
             },
             "system": {
@@ -551,8 +563,8 @@ class DCNumJobRunner(threading.Thread):
             # 3. image features from the input file
             [self.draw.h5, ["image", "image_bg", "bg_off"], "optional"],
         ]
-        with
-
+        with HDF5Writer(self.path_temp_out) as hw:
+            hout = hw.h5
             # First, we have to determine the basin mapping from input to
             # output. This information is stored by the QueueCollectorThread
             # in the "basinmap0" feature, ready to be used by us.

@@ -565,7 +577,7 @@ class DCNumJobRunner(threading.Thread):
             # mapping of the input file was set to slice(1, 100), then the
             # first image would not be there, and we would have
             # [1, 1, 1, ...].
-            idx_um = hout["events/index_unmapped"]
+            idx_um = hout["events/index_unmapped"][:]

             # If we want to convert this to an actual basinmap feature,
             # then we have to convert those indices to indices that map

@@ -780,6 +792,19 @@ class DCNumJobRunner(threading.Thread):
         self.logger.info("Finished segmentation and feature extraction")


+def get_library_versions_dict(library_name_list):
+    version_dict = {}
+    for library_name in library_name_list:
+        try:
+            lib = importlib.import_module(library_name)
+        except BaseException:
+            version = None
+        else:
+            version = lib.__version__
+        version_dict[library_name] = version
+    return version_dict
+
+
 def join_thread_helper(thr, timeout, retries, logger, name):
     for _ in range(retries):
         thr.join(timeout=timeout)
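The new get_library_versions_dict helper imports each library by name and records its __version__, storing None for anything that fails to import, so a missing or broken optional dependency can never crash metadata collection. A hypothetical call (the version number shown is illustrative):

    >>> get_library_versions_dict(["numpy", "torch"])
    {'numpy': '1.26.4', 'torch': None}  # torch not installed in this example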
dcnum/logic/job.py CHANGED

@@ -182,3 +182,25 @@ class DCNumPipelineJob:
         if len(ret) == 1:
             ret = ret[0]
         return ret
+
+    def validate(self):
+        """Make sure the pipeline will run given the job kwargs
+
+        Returns
+        -------
+        True:
+            for testing convenience
+
+        Raises
+        ------
+        dcnum.segm.SegmenterNotApplicableError:
+            the segmenter is incompatible with the input path
+        """
+        # Check segmenter applicability
+        seg_cls = get_available_segmenters()[self.kwargs["segmenter_code"]]
+        with HDF5Data(self.kwargs["path_in"]) as hd:
+            seg_cls.validate_applicability(
+                segmenter_kwargs=self.kwargs["segmenter_kwargs"],
+                logs=hd.logs,
+                meta=hd.meta)
+        return True
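With this addition, a job can be checked for segmenter compatibility before the pipeline is started. A minimal sketch, assuming a thresholding segmenter and an existing input file; the constructor arguments shown are illustrative:

    from dcnum.logic import DCNumPipelineJob

    job = DCNumPipelineJob(path_in="measurement.rtdc",
                           segmenter_code="thresh")
    # Raises dcnum.segm.SegmenterNotApplicableError if the segmenter
    # cannot be used with this input file; returns True otherwise.
    job.validate()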
dcnum/read/hdf5_data.py CHANGED

@@ -205,7 +205,9 @@ class HDF5Data:
             bn_data = "\n".join(
                 [s.decode() for s in h5["basins"][bnkey][:].tolist()])
             bn_dict = json.loads(bn_data)
-
+            if bn_dict["type"] == "file":
+                # we only support file-based basins
+                basins.append(bn_dict)
         self.basins = sorted(basins, key=lambda x: x["name"])

         if state["pixel_size"] is not None:
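Each entry under h5["basins"] decodes to a JSON dict, and the new check keeps only basins whose "type" is "file". Only the "type" and "name" keys are actually referenced by the code above; the remaining key in this sketch is an illustrative assumption:

    bn_dict = {
        "type": "file",    # checked by the new filter; any other basin
                           # type is now skipped
        "name": "basin0",  # used to sort self.basins
        "paths": ["/data/measurement_basin.rtdc"],  # assumed key
    }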
dcnum/segm/segm_torch/__init__.py CHANGED

@@ -1,16 +1,20 @@
 import importlib
+import warnings

 try:
     torch = importlib.import_module("torch")
     req_maj = 2
-    req_min =
+    req_min = 2
     ver_tuple = torch.__version__.split(".")
     act_maj = int(ver_tuple[0])
     act_min = int(ver_tuple[1])
     if act_maj < req_maj or (act_maj == req_maj and act_min < req_min):
-
-
-
+        warnings.warn(f"Your PyTorch version {act_maj}.{act_min} is "
+                      f"not supported, please update to at least "
+                      f"{req_maj}.{req_min} to use dcnum's PyTorch "
+                      f"segmenters")
+        raise ImportError(
+            f"Could not find PyTorch {req_maj}.{req_min}")
 except ImportError:
     pass
 else:
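Raising ImportError for an outdated torch is deliberate: the surrounding try/except already treats a missing torch as "PyTorch segmenters unavailable", so an unsupported version now takes exactly the same code path, after warning the user why. The general guard pattern, with an illustrative module name:

    import importlib
    import warnings

    try:
        mylib = importlib.import_module("mylib")
        major, minor = (int(v) for v in mylib.__version__.split(".")[:2])
        if (major, minor) < (2, 2):
            warnings.warn("mylib is too old, please update to >= 2.2")
            raise ImportError("Could not find mylib 2.2")
    except ImportError:
        mylib = None  # optional feature stays disabled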
dcnum/write/writer.py CHANGED

@@ -48,7 +48,13 @@ class HDF5Writer:
             self.h5 = obj
             self.h5_owned = False
         else:
-            self.h5 = h5py.File(obj,
+            self.h5 = h5py.File(obj,
+                                mode=mode,
+                                libver="latest",
+                                # Set chunk cache size to 3 MiB for each
+                                # dataset to allow partial writes.
+                                rdcc_nbytes=3145728,
+                                )
             self.h5_owned = True
         self.events = self.h5.require_group("events")
         ds_kwds = set_default_filter_kwargs(ds_kwds)
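For reference, rdcc_nbytes is h5py's setting for the raw data chunk cache, given per file and applied to every dataset opened through it; 3145728 bytes is 3 MiB. A sketch of an equivalent open call outside of HDF5Writer (file name illustrative):

    import h5py

    # 3 * 1024 ** 2 == 3145728, i.e. the 3 MiB chunk cache set above
    with h5py.File("output.rtdc", "a", libver="latest",
                   rdcc_nbytes=3 * 1024 ** 2) as h5:
        h5.require_group("events")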
@@ -323,8 +329,6 @@ def copy_features(h5_src: h5py.File,
     """
     ei = h5_src["events"]
     eo = h5_dst.require_group("events")
-    # This is the size of the output dataset
-    size = h5_dst.attrs["experiment:event count"]
     hw = HDF5Writer(h5_dst)
     for feat in features:
         if feat in eo:

@@ -341,20 +345,28 @@ def copy_features(h5_src: h5py.File,
                 dst_name=feat.encode(),
             )
         else:
-            #
-            #
+            # We have to perform mapping.
+            # Since h5py is very slow at indexing with arrays,
+            # we instead read the data in chunks from the input file,
+            # and perform the mapping afterward using the numpy arrays.
             dsi = ei[feat]
             chunk_size = hw.get_best_nd_chunks(dsi[0].shape, dsi.dtype)[0]
+            size_in = dsi.shape[0]
             start = 0
-            while start <
-
-
-
-
-
+            while start < size_in:
+                # Get a big chunk of data
+                big_chunk = 10 * chunk_size
+                stop = start + big_chunk
+                data_in = dsi[start:stop]
+                # Determine the indices that we need from that chunk.
+                mapping_idx = (start <= mapping) * (mapping < stop)
+                mapping_chunk = mapping[mapping_idx] - start
+                data = data_in[mapping_chunk]
+                # Note that HDF5 does its own caching, properly handling
+                # partial chunk writes.
                 hw.store_feature_chunk(feat, data)
                 # increment start
-                start
+                start = stop


 def copy_metadata(h5_src: h5py.File,
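The rewritten loop avoids h5py's slow fancy indexing: it reads contiguous slabs from the source dataset and applies the index array in memory with numpy. A self-contained sketch of the technique, assuming mapping is sorted in ascending order (function and argument names are illustrative):

    import numpy as np

    def gather_chunked(dsi, mapping, big_chunk=10000):
        """Collect dsi[mapping] slab by slab instead of fancy-indexing HDF5."""
        parts = []
        for start in range(0, dsi.shape[0], big_chunk):
            stop = start + big_chunk
            data_in = dsi[start:stop]  # one contiguous (fast) HDF5 read
            inside = (start <= mapping) & (mapping < stop)
            parts.append(data_in[mapping[inside] - start])  # numpy indexing
        return np.concatenate(parts)

    # e.g. gather_chunked(h5["events/deform"], np.array([1, 5, 7]), 4)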
{dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dcnum
-Version: 0.23.1
+Version: 0.23.3
 Summary: numerics toolbox for imaging deformability cytometry
 Author: Maximilian Schlögel, Paul Müller, Raghava Alajangi
 Maintainer-email: Paul Müller <dev@craban.de>

@@ -26,7 +26,7 @@ Requires-Dist: opencv-python-headless
 Requires-Dist: scikit-image
 Requires-Dist: scipy >=1.8.0
 Provides-Extra: torch
-Requires-Dist: torch >=2.
+Requires-Dist: torch >=2.2 ; extra == 'torch'

 |dcnum|
 =======
{dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/RECORD CHANGED

@@ -1,7 +1,7 @@
 dcnum/__init__.py,sha256=hcawIKS7utYiOyVhOAX9t7K3xYzP1b9862VV0b6qSrQ,74
-dcnum/_version.py,sha256=
+dcnum/_version.py,sha256=9qJUsi4XZXN5b8lZ0o4HLiLegFWN6GNFX7DcJU4ehOE,413
 dcnum/feat/__init__.py,sha256=jUJYWTD3VIoDNKrmryXbjHb1rGwYtK4b7VPWihYgUoo,325
-dcnum/feat/event_extractor_manager_thread.py,sha256=
+dcnum/feat/event_extractor_manager_thread.py,sha256=FAxSyRfaNAuBWNplxHngp5h-44s0qIP24XX_oETdfMk,7836
 dcnum/feat/gate.py,sha256=Yhxq80JoRMmQzBxl35C8NT91c9QcmQa-EIKLuxK6WvE,7221
 dcnum/feat/queue_event_extractor.py,sha256=0ncTQleT1sfc98zYkFuZWxU-akecfTrW6-OOU3z-d8o,15698
 dcnum/feat/feat_background/__init__.py,sha256=OTmMuazHNaSrZb2XW4cnJ6PlgJLbKrPbaidpEixYa0A,341

@@ -20,8 +20,8 @@ dcnum/feat/feat_texture/__init__.py,sha256=6StM9S540UVtdFFR3bHa7nfCTomeVdoo7Uy9C
 dcnum/feat/feat_texture/common.py,sha256=COXHpXS-7DMouGu3WF83I76L02Sr7P9re4lxajh6g0E,439
 dcnum/feat/feat_texture/tex_all.py,sha256=_5H3sXYRN0Uq2eUHn3XUyEHkU_tncEqbqJTC-HZcnGY,5198
 dcnum/logic/__init__.py,sha256=7J3GrwJInNQbrLk61HRIV7X7p69TAIbMYpR34hh6u14,177
-dcnum/logic/ctrl.py,sha256=
-dcnum/logic/job.py,sha256=
+dcnum/logic/ctrl.py,sha256=sgn1gxctsiPjzD4hFQpzcPGeHP08yjMhWuwG0ocijeY,36182
+dcnum/logic/job.py,sha256=9BN2WjYqjjJuLnfNZAtQ2Nn47Glo2jVrivDodGJoqlQ,7713
 dcnum/logic/json_encoder.py,sha256=cxMnqisbKEVf-rVcw6rK2BBAb6iz_hKFaGl81kK36lQ,571
 dcnum/meta/__init__.py,sha256=AVqRgyKXO1orKnE305h88IBvoZ1oz6X11HN1WP5nGvg,60
 dcnum/meta/paths.py,sha256=J_ikeHzd7gEeRgAKjuayz3x6q4h1fOiDadM-ZxhAGm4,1053

@@ -29,7 +29,7 @@ dcnum/meta/ppid.py,sha256=OD79NrZ8waC3julwdH8NjneUuXqSRSHsUDpKzT5pdyU,8432
 dcnum/read/__init__.py,sha256=ksLdV8EkOU3EPje8teCOSehcUeGAZfg9TQ5ltuEUgls,216
 dcnum/read/cache.py,sha256=lisrGG7AyvVitf0h92wh5FvYCsxa0pWyGcAyYwGP-LQ,6471
 dcnum/read/const.py,sha256=GG9iyXDtEldvJYOBnhZjlimzIeBMAt4bSr2-xn2gzzc,464
-dcnum/read/hdf5_data.py,sha256=
+dcnum/read/hdf5_data.py,sha256=g1kcq9nK2eA063qlAWyvYq1UuRCxdCmWqjK8VTxE8Hk,23603
 dcnum/read/mapped.py,sha256=UryArlrIsHxjOyimBL2Nooi3r73zuGtnGdqdxa6PK_g,3076
 dcnum/segm/__init__.py,sha256=9cLEAd3JWE8IGqDHV-eSDIYOGBfOepd8OcebtNs8Omk,309
 dcnum/segm/segm_thresh.py,sha256=iVhvIhzO0Gw0t3rXOgH71rOI0CNjJJQq4Gg6BulUhK8,948

@@ -37,7 +37,7 @@ dcnum/segm/segmenter.py,sha256=FWLFDBR-x_85ku2rObA2F-QBrM4IUaUL-YHChLagVvM,14902
 dcnum/segm/segmenter_manager_thread.py,sha256=frM0sMxC7f7TQiFjmpRxuwG2kUBFpW1inV8dtpADHiI,5924
 dcnum/segm/segmenter_mpo.py,sha256=o6mQlITHgEWvQt9v6oCWwAcZUvxE7MOeLE9DFManzpY,13757
 dcnum/segm/segmenter_sto.py,sha256=e6MtN_RWusA0wTExV-FLGpDXNJs1CbSyXcSdWUPBMvM,3959
-dcnum/segm/segm_torch/__init__.py,sha256=
+dcnum/segm/segm_torch/__init__.py,sha256=DtUqJTbj7ybrTbXlwHq1Y4SCzi22rMW9Cus6wX-iU-A,822
 dcnum/segm/segm_torch/segm_torch_base.py,sha256=G9AhVyD6LkAmk0tkbYnJUSpvcj3_HYf0uqfILZQsyus,4479
 dcnum/segm/segm_torch/segm_torch_mpo.py,sha256=N01dVXai_4eIGfHJrPjg5C2Bkyq1TOeXeJhw3YbGidw,2504
 dcnum/segm/segm_torch/segm_torch_sto.py,sha256=PTOJrP_FkaxZZul8lM4VA2HL3KyxrheDDWWdJbmJdiw,3393

@@ -47,9 +47,9 @@ dcnum/segm/segm_torch/torch_preproc.py,sha256=kjabu76paw23kO7RP7Ik6IY60Kk1VBAHKB
 dcnum/write/__init__.py,sha256=QvWHeZmjHI18i-YlGYuzN3i7dVWY9UCReKchrJ-gif0,260
 dcnum/write/deque_writer_thread.py,sha256=ao7F1yrVKyufgC4rC0Y2_Vt7snuT6KpI7W2qVxcjdhk,1994
 dcnum/write/queue_collector_thread.py,sha256=d_WfdsZdFnFsiAY0zVMwUlA4juIMeiWYmE_-rezBQCE,11734
-dcnum/write/writer.py,sha256=
-dcnum-0.23.
-dcnum-0.23.
-dcnum-0.23.
-dcnum-0.23.
-dcnum-0.23.
+dcnum/write/writer.py,sha256=H0XoX6lOi1D1sa-ea5SLVftIQY-gdSIVq4vihYv0ODo,16200
+dcnum-0.23.3.dist-info/LICENSE,sha256=YRChA1C8A2E-amJbudwMcbTCZy_HzmeY0hMIvduh1MM,1089
+dcnum-0.23.3.dist-info/METADATA,sha256=bqOjeHyxCqI4R5leSsFWtw-Ff0ywxQtsA_GuGB1hVNE,2280
+dcnum-0.23.3.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+dcnum-0.23.3.dist-info/top_level.txt,sha256=Hmh38rgG_MFTVDpUDGuO2HWTSq80P585Het4COQzFTg,6
+dcnum-0.23.3.dist-info/RECORD,,

{dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/LICENSE
File without changes

{dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/WHEEL
File without changes

{dcnum-0.23.1.dist-info → dcnum-0.23.3.dist-info}/top_level.txt
File without changes