dclab-0.62.17-cp39-cp39-musllinux_1_2_x86_64.whl → dclab-0.64.0-cp39-cp39-musllinux_1_2_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dclab has been flagged as possibly problematic.

dclab/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.62.17'
-__version_tuple__ = version_tuple = (0, 62, 17)
+__version__ = version = '0.64.0'
+__version_tuple__ = version_tuple = (0, 64, 0)
dclab/http_utils.py CHANGED
@@ -254,7 +254,7 @@ class ResoluteRequestsSession(requests.Session):
                 break
         else:
             raise requests.exceptions.ReadTimeout(
-                f"Resolut sesion failed for {args} and {kwargs}!")
+                f"Resolute session failed for {args} and {kwargs}!")
         return resp
 
 
dclab/kde/base.py CHANGED
@@ -2,7 +2,12 @@ import warnings
 
 import numpy as np
 
-from .methods import bin_width_doane, get_bad_vals, methods
+from .methods import bin_width_doane_div5, get_bad_vals, methods
+from .contours import find_contours_level, get_quantile_levels
+
+
+class ContourSpacingTooLarge(UserWarning):
+    pass
 
 
 class KernelDensityEstimator:
@@ -107,6 +112,139 @@ class KernelDensityEstimator:
         yscale: str
             See `xscale`.
 
+        Returns
+        -------
+        X, Y, Z : coordinates
+            The kernel density Z evaluated on a rectangular grid (X,Y).
+        """
+        warnings.warn("`get_contour` is deprecated; please use "
+                      "`get_raster` instead", DeprecationWarning)
+        return self.get_raster(
+            xax=xax, yax=yax, xacc=xacc, yacc=yacc,
+            kde_type=kde_type, kde_kwargs=kde_kwargs,
+            xscale=xscale, yscale=yscale
+        )
+
+    def get_contour_lines(self, quantiles=None, xax="area_um", yax="deform",
+                          xacc=None, yacc=None, kde_type="histogram",
+                          kde_kwargs=None, xscale="linear", yscale="linear",
+                          ret_levels=False):
+        """Compute contour lines for a given kernel kensity estimate.
+
+        Parameters
+        ----------
+        quantiles: list or array of floats
+            KDE Quantiles for which contour levels are computed. The
+            values must be between 0 and 1. If set to None, use
+            [0.5, 0.95] as default.
+        xax: str
+            Identifier for X axis (e.g. "area_um", "aspect", "deform")
+        yax: str
+            Identifier for Y axis
+        xacc: float
+            Contour accuracy in x direction
+            if set to None, will use :func:`bin_width_doane_div5`
+        yacc: float
+            Contour accuracy in y direction
+            if set to None, will use :func:`bin_width_doane_div5`
+        kde_type: str
+            The KDE method to use
+        kde_kwargs: dict
+            Additional keyword arguments to the KDE method
+        xscale: str
+            If set to "log", take the logarithm of the x-values before
+            computing the KDE. This is useful when data are
+            displayed on a log-scale. Defaults to "linear".
+        yscale: str
+            See `xscale`
+        ret_levels: bool
+            If set to True, return the levels of the contours
+            (default: False)
+
+        Returns
+        -------
+        contour_lines: list of lists (of lists)
+            For every number in `quantiles`, this list contains a list of
+            corresponding contour lines. Each contour line is a 2D
+            array of shape (N, 2), where N is the number of points in the
+            contour line.
+        levels: list of floats
+            The density levels corresponding to each number in `quantiles`.
+            Only returned if `ret_levels` is set to True.
+        """
+        if not quantiles:
+            quantiles = [0.5, 0.95]
+        try:
+            x, y, density = self.get_raster(
+                xax=xax,
+                yax=yax,
+                xacc=xacc,
+                yacc=yacc,
+                xscale=xscale,
+                yscale=yscale,
+                kde_type=kde_type,
+                kde_kwargs=kde_kwargs,
+            )
+        except ValueError:
+            # most-likely there is nothing to compute a contour for
+            return []
+        if density.shape[0] < 3 or density.shape[1] < 3:
+            warnings.warn("Contour not possible; spacing may be too large!",
+                          ContourSpacingTooLarge)
+            return []
+        levels = get_quantile_levels(
+            density=density,
+            x=x,
+            y=y,
+            xp=self.rtdc_ds[xax][self.rtdc_ds.filter.all],
+            yp=self.rtdc_ds[yax][self.rtdc_ds.filter.all],
+            q=np.array(quantiles),
+            normalize=False)
+        contours = []
+        # Normalize levels to [0, 1]
+        nlevels = np.array(levels) / density.max()
+        for nlev in nlevels:
+            # make sure that the contour levels are not at the boundaries
+            if not (np.allclose(nlev, 0, atol=1e-12, rtol=0)
+                    or np.allclose(nlev, 1, atol=1e-12, rtol=0)):
+                cc = find_contours_level(
+                    density, x=x, y=y, level=nlev)
+                contours.append(cc)
+            else:
+                contours.append([])
+        if ret_levels:
+            return contours, levels
+        else:
+            return contours
+
+    def get_raster(self, xax="area_um", yax="deform", xacc=None, yacc=None,
+                   kde_type="histogram", kde_kwargs=None, xscale="linear",
+                   yscale="linear"):
+        """Evaluate the kernel density estimate on a grid
+
+        Parameters
+        ----------
+        xax: str
+            Identifier for X axis (e.g. "area_um", "aspect", "deform")
+        yax: str
+            Identifier for Y axis
+        xacc: float
+            Contour accuracy in x direction
+            if set to None, will use :func:`bin_width_doane_div5`
+        yacc: float
+            Contour accuracy in y direction
+            if set to None, will use :func:`bin_width_doane_div5`
+        kde_type: str
+            The KDE method to use
+        kde_kwargs: dict
+            Additional keyword arguments to the KDE method
+        xscale: str
+            If set to "log", take the logarithm of the x-values before
+            computing the KDE. This is useful when data are
+            displayed on a log-scale. Defaults to "linear".
+        yscale: str
+            See `xscale`.
+
         Returns
         -------
         X, Y, Z : coordinates
@@ -128,21 +266,21 @@ class KernelDensityEstimator:
             a=x,
             feat=xax,
             scale=xscale,
-            method=bin_width_doane,
+            method=bin_width_doane_div5,
             ret_scaled=True)
 
         yacc_sc, ys = self.get_spacing(
             a=y,
             feat=yax,
             scale=yscale,
-            method=bin_width_doane,
+            method=bin_width_doane_div5,
             ret_scaled=True)
 
         if xacc is None or xacc == 0:
-            xacc = xacc_sc / 5
+            xacc = xacc_sc
 
         if yacc is None or yacc == 0:
-            yacc = yacc_sc / 5
+            yacc = yacc_sc
 
         # Ignore infs and nans
         bad = get_bad_vals(xs, ys)
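
Note: taken together, `get_raster` (formerly `get_contour`, now deprecated) and the new `get_contour_lines` form the public KDE API of this release. A minimal usage sketch based on the signatures above; the input file name is hypothetical:

import dclab
from dclab.kde.base import KernelDensityEstimator

ds = dclab.new_dataset("example.rtdc")  # placeholder path
kde = KernelDensityEstimator(rtdc_ds=ds)
# contour lines at the 50% and 95% KDE quantiles
contours, levels = kde.get_contour_lines(quantiles=[0.5, 0.95],
                                         xax="area_um",
                                         yax="deform",
                                         ret_levels=True)
for quantile, level, lines in zip([0.5, 0.95], levels, contours):
    print(f"quantile {quantile}: {len(lines)} line(s) at density {level:.3g}")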
dclab/kde/methods.py CHANGED
@@ -56,6 +56,16 @@ def bin_width_doane(a):
     return acc
 
 
+def bin_width_doane_div5(a):
+    """Compute contour spacing based on Doane's formula divided by five
+
+    See Also
+    --------
+    bin_width_doane: method used to compute the bin width
+    """
+    return bin_width_doane(a) / 5
+
+
 def bin_width_percentile(a):
     """Compute contour spacing based on data percentiles
 
dclab/rtdc_dataset/core.py CHANGED
@@ -635,7 +635,7 @@ class RTDCBase(abc.ABC):
             The kernel density Z evaluated on a rectangular grid (X,Y).
         """
         kde_instance = KernelDensityEstimator(rtdc_ds=self)
-        xmesh, ymesh, density = kde_instance.get_contour(
+        xmesh, ymesh, density = kde_instance.get_raster(
             xax=xax, yax=yax, xacc=xacc, yacc=yacc, kde_type=kde_type,
             kde_kwargs=kde_kwargs, xscale=xscale, yscale=yscale
         )
@@ -746,6 +746,8 @@
                 "ignored_basins": bd_keys,
                 # basin key
                 "key": bdict["key"],
+                # whether the basin is perishable or not
+                "perishable": bdict.get("perishable", False),
             }
 
             # Check whether this basin is supported and exists
@@ -783,12 +785,19 @@
             b_cls = bc[bdict["format"]]
             # Try absolute path
             bna = b_cls(pp, **kwargs)
-            if bna.verify_basin():
-                basins.append(bna)
-                break
+
+            try:
+                absolute_exists = bna.verify_basin()
+            except BaseException:
+                pass
+            else:
+                if absolute_exists:
+                    basins.append(bna)
+                    break
             # Try relative path
             this_path = pathlib.Path(self.path)
             if this_path.exists():
+
                 # Insert relative path
                 bnr = b_cls(this_path.parent / pp, **kwargs)
                 if bnr.verify_basin():
dclab/rtdc_dataset/export.py CHANGED
@@ -51,6 +51,7 @@ class Export(object):
            pixel_format: str = "yuv420p",
            codec: str = "rawvideo",
            codec_options: dict[str, str] = None,
+           progress_callback: callable = None,
            ):
        """Exports filtered event images to a video file
 
@@ -72,6 +73,10 @@
        codec_options:
            Additional arguments to give to the codec using ffmpeg,
            e.g. `{'preset': 'slow', 'crf': '0'}` for "libx264" codec.
+       progress_callback: callable
+           Function that takes at least two arguments: float between 0 and
+           1 for monitoring progress and a string describing what is being
+           done.
 
        Notes
        -----
@@ -103,6 +108,10 @@
 
        # write the filtered frames to the video file
        for evid in np.arange(len(ds)):
+
+           if progress_callback is not None and evid % 10_000 == 0:
+               progress_callback(evid / len(ds), "exporting video")
+
            # skip frames that were filtered out
            if filtered and not ds.filter.all[evid]:
                continue
@@ -116,12 +125,22 @@
 
            for packet in stream.encode(av_frame):
                container.mux(packet)
+
+           if progress_callback is not None:
+               progress_callback(1.0, "video export complete")
+
        else:
            msg = "No image data to export: dataset {} !".format(ds.title)
            raise OSError(msg)
 
-   def fcs(self, path, features, meta_data=None, filtered=True,
-           override=False):
+   def fcs(self,
+           path: pathlib.Path | str,
+           features: list[str],
+           meta_data: dict = None,
+           filtered: bool = True,
+           override: bool = False,
+           progress_callback: callable = None,
+           ):
        """Export the data of an RT-DC dataset to an .fcs file
 
        Parameters
@@ -142,6 +161,10 @@
        override: bool
            If set to `True`, an existing file ``path`` will be overridden.
            If set to `False`, raises `OSError` if ``path`` exists.
+       progress_callback: callable
+           Function that takes at least two arguments: float between 0 and
+           1 for monitoring progress and a string describing what is being
+           done.
 
        Notes
        -----
@@ -175,12 +198,18 @@
        # Collect the header
        chn_names = [dfn.get_feature_label(c, rtdc_ds=ds) for c in features]
 
+       if progress_callback is not None:
+           progress_callback(0.0, "collecting data")
+
        # Collect the data
        if filtered:
            data = [ds[c][ds.filter.all] for c in features]
        else:
            data = [ds[c] for c in features]
 
+       if progress_callback is not None:
+           progress_callback(0.5, "exporting data")
+
        data = np.array(data).transpose()
        meta_data["dclab version"] = version
        fcswrite.write_fcs(filename=str(path),
@@ -189,6 +218,9 @@
                           text_kw_pr=meta_data,
                           )
 
+       if progress_callback is not None:
+           progress_callback(1.0, "export complete")
+
    def hdf5(self,
             path: str | pathlib.Path,
             features: List[str] = None,
@@ -200,7 +232,9 @@
             override: bool = False,
             compression_kwargs: Dict = None,
             compression: str = "deprecated",
-            skip_checks: bool = False):
+            skip_checks: bool = False,
+            progress_callback: callable = None,
+            ):
        """Export the data of the current instance to an HDF5 file
 
        Parameters
@@ -234,8 +268,8 @@
        compression_kwargs: dict
            Dictionary with the keys "compression" and "compression_opts"
            which are passed to :func:`h5py.H5File.create_dataset`. The
-           default is Zstandard compression with the lowest compression
-           level `hdf5plugin.Zstd(clevel=1)`.
+           default is Zstandard compression with the compression
+           level 5 `hdf5plugin.Zstd(clevel=5)`.
        compression: str or None
            Compression method used for data storage;
            one of [None, "lzf", "gzip", "szip"].
@@ -244,7 +278,10 @@
            Use `compression_kwargs` instead.
        skip_checks: bool
            Disable checking whether all features have the same length.
-
+       progress_callback: callable
+           Function that takes at least two arguments: float between 0 and
+           1 for monitoring progress and a string describing what is being
+           done.
 
        .. versionchanged:: 0.58.0
 
@@ -263,7 +300,7 @@
            # be backwards-compatible
            compression_kwargs = {"compression": compression}
        if compression_kwargs is None:
-           compression_kwargs = hdf5plugin.Zstd(clevel=1)
+           compression_kwargs = hdf5plugin.Zstd(clevel=5)
        path = pathlib.Path(path)
        # Make sure that path ends with .rtdc
        if path.suffix not in [".rtdc", ".rtdc~"]:
@@ -335,6 +372,8 @@
        with RTDCWriter(path,
                        mode="append",
                        compression_kwargs=compression_kwargs) as hw:
+           if progress_callback is not None:
+               progress_callback(0.0, "writing metadata")
            # write meta data
            hw.store_metadata(meta)
 
@@ -369,7 +408,10 @@
                                    ds.tables[tab])
 
            # write each feature individually
-           for feat in features:
+           for ii, feat in enumerate(features):
+               if progress_callback is not None:
+                   progress_callback(ii / len(features), f"exporting {feat}")
+
                if (filter_arr is None or
                        # This does not work for the .tdms file format
                        # (and probably also not for DCOR).
@@ -393,6 +435,10 @@
                                     filtarr=filter_arr)
 
            if basins:
+               if progress_callback:
+                   progress_callback(1 - 1 / (len(features) or 1),
+                                     "writing basins")
+
                # We have to store basins. There are three options:
                # - filtering disabled: just copy basins
                # - filtering enabled
@@ -459,6 +505,11 @@
                    # defined in. Since we are exporting, it does not
                    # make sense to store these basins in the output file.
                    continue
+               elif bn_dict.get("perishable"):
+                   # Perishable basins require secret keys or complicated
+                   # logic to execute in order to refresh them. We do not
+                   # store them in the output file.
+                   continue
                basinmap_orig = bn_dict.get("basin_map")
                if not filtered:
                    # filtering disabled: just copy basins
@@ -472,9 +523,17 @@
 
                    # Do not verify basins, it takes too long.
                    hw.store_basin(**bn_dict, verify=False)
+           if progress_callback is not None:
+               progress_callback(1.0, "export complete")
 
-   def tsv(self, path, features, meta_data=None, filtered=True,
-           override=False):
+   def tsv(self,
+           path: pathlib.Path | str,
+           features: list[str],
+           meta_data: dict = None,
+           filtered: bool = True,
+           override: bool = False,
+           progress_callback: callable = None,
+           ):
        """Export the data of the current instance to a .tsv file
 
        Parameters
@@ -496,6 +555,10 @@
        override: bool
            If set to `True`, an existing file ``path`` will be overridden.
            If set to `False`, raises `OSError` if ``path`` exists.
+       progress_callback: callable
+           Function that takes at least two arguments: float between 0 and
+           1 for monitoring progress and a string describing what is being
+           done.
        """
        if meta_data is None:
            meta_data = {}
@@ -516,6 +579,10 @@
            if c not in ds.features_scalar:
                raise ValueError("Invalid feature name {}".format(c))
        meta_data["dclab version"] = version
+
+       if progress_callback is not None:
+           progress_callback(0.0, "writing metadata")
+
        # Write BOM header
        with path.open("wb") as fd:
            fd.write(codecs.BOM_UTF8)
@@ -539,17 +606,26 @@
            fd.write("# "+header2+"\n")
 
        with path.open("ab") as fd:
-           # write data
+           if progress_callback is not None:
+               progress_callback(0.1, "collecting data")
+
+           # collect data
            if filtered:
                data = [ds[c][ds.filter.all] for c in features]
            else:
                data = [ds[c] for c in features]
 
+           if progress_callback is not None:
+               progress_callback(0.5, "writing data")
+
            np.savetxt(fd,
                       np.array(data).transpose(),
                       fmt=str("%.10e"),
                       delimiter="\t")
 
+       if progress_callback is not None:
+           progress_callback(1.0, "export complete")
+
 
def yield_filtered_array_stacks(data, indices):
    """Generator returning chunks with the filtered feature data
dclab/rtdc_dataset/feat_basin.py CHANGED
@@ -6,9 +6,10 @@ which, when opened in dclab, can access features stored in the input file
 from __future__ import annotations
 
 import abc
+import logging
 import numbers
 import threading
-from typing import Dict, List, Literal
+from typing import Callable, Dict, List, Literal, Union
 import uuid
 import warnings
 import weakref
@@ -18,6 +19,9 @@ import numpy as np
 from ..util import copy_if_needed
 
 
+logger = logging.getLogger(__name__)
+
+
 class BasinFeatureMissingWarning(UserWarning):
     """Used when a badin feature is defined but not stored"""
 
@@ -26,6 +30,10 @@ class CyclicBasinDependencyFoundWarning(UserWarning):
     """Used when a basin is defined in one of its sub-basins"""
 
 
+class IgnoringPerishableBasinTTL(UserWarning):
+    """Used when refreshing a basin does not support TTL"""
+
+
 class BasinmapFeatureMissingError(KeyError):
     """Used when one of the `basinmap` features is not defined"""
     pass
@@ -47,6 +55,114 @@ class BasinAvailabilityChecker(threading.Thread):
         self.basin.is_available()
 
 
+class PerishableRecord:
+    """A class containing information about perishable basins
+
+    Perishable basins are basins than may discontinue to work after
+    e.g. a specific amount of time (e.g. presigned S3 URLs). With the
+    `PerishableRecord`, these basins may be "refreshed" (made
+    available again).
+    """
+    def __init__(self,
+                 basin,
+                 expiration_func: Callable = None,
+                 expiration_kwargs: Dict = None,
+                 refresh_func: Callable = None,
+                 refresh_kwargs: Dict = None,
+                 ):
+        """
+        Parameters
+        ----------
+        basin: Basin
+            Instance of the perishable basin
+        expiration_func: callable
+            A function that determines whether the basin has perished.
+            It must accept `basin` as the first argument. Calling this
+            function should be fast, as it is called every time a feature
+            is accessed.
+            Note that if you are implementing this in the time domain, then
+            you should use `time.time()` (TSE), because you need an absolute
+            time measure. `time.monotonic()` for instance does not count up
+            when the system goes to sleep. However, keep in mind that if
+            a remote machine dictates the expiration time, then that
+            remote machine should also transmit the creation time (in case
+            there are time offsets).
+        expiration_kwargs: dict
+            Additional kwargs for `expiration_func`.
+        refresh_func: callable
+            The function used to refresh the `basin`. It must accept
+            `basin` as the first argument.
+        refresh_kwargs: dict
+            Additional kwargs for `refresh_func`
+        """
+        if not isinstance(basin, weakref.ProxyType):
+            basin = weakref.proxy(basin)
+        self.basin = basin
+        self.expiration_func = expiration_func
+        self.expiration_kwargs = expiration_kwargs or {}
+        self.refresh_func = refresh_func
+        self.refresh_kwargs = refresh_kwargs or {}
+
+    def __repr__(self):
+        state = "perished" if self.perished() else "valid"
+        return f"<PerishableRecord ({state}) at {hex(id(self))}>"
+
+    def perished(self) -> Union[bool, None]:
+        """Determine whether the basin has perished
+
+        Returns
+        -------
+        state: bool or None
+            True means the basin has perished, False means the basin
+            has not perished, and `None` means we don't know
+        """
+        if self.expiration_func is None:
+            return None
+        else:
+            return self.expiration_func(self.basin, **self.expiration_kwargs)
+
+    def refresh(self, extend_by: float = None) -> None:
+        """Extend the lifetime of the associated perishable basin
+
+        Parameters
+        ----------
+        extend_by: float
+            Custom argument for extending the life of the basin.
+            Normally, this would be a lifetime.
+
+        Returns
+        -------
+        basin: dict | None
+            Dictionary for instantiating a new basin
+        """
+        if self.refresh_func is None:
+            # The basin is a perishable basin, but we have no way of
+            # refreshing it.
+            logger.error(f"Cannot refresh basin '{self.basin}'")
+            return
+
+        if extend_by and "extend_by" not in self.refresh_kwargs:
+            warnings.warn(
+                "Parameter 'extend_by' ignored, because the basin "
+                "source does not support it",
+                IgnoringPerishableBasinTTL)
+            extend_by = None
+
+        rkw = {}
+        rkw.update(self.refresh_kwargs)
+
+        if extend_by is not None:
+            rkw["extend_by"] = extend_by
+
+        self.refresh_func(self.basin, **rkw)
+        logger.info(f"Refreshed basin '{self.basin}'")
+
+        # If everything went well, reset the current dataset of the basin
+        if self.basin._ds is not None:
+            self.basin._ds.close()
+            self.basin._ds = None
+
+
 class Basin(abc.ABC):
     """A basin represents data from an external source
 
@@ -76,6 +192,7 @@ class Basin(abc.ABC):
                  mapping_referrer: Dict = None,
                  ignored_basins: List[str] = None,
                  key: str = None,
+                 perishable=False,
                  **kwargs):
         """
 
@@ -115,6 +232,10 @@
             Unique key to identify this basin; normally computed from
             a JSON dump of the basin definition. A random string is used
             if None is specified.
+        perishable: bool or PerishableRecord
+            If this is not False, then it must be a :class:`.PerishableRecord`
+            that holds the information about the expiration time, and that
+            comes with a method `refresh` to extend the lifetime of the basin.
         kwargs:
             Additional keyword arguments passed to the `load_dataset`
             method of the `Basin` subclass.
@@ -130,7 +251,12 @@
         self.name = name
         #: lengthy description of the basin
         self.description = description
-        # defining key of the basin
+        # perishable record
+        if isinstance(perishable, bool) and perishable:
+            # Create an empty perishable record
+            perishable = PerishableRecord(self)
+        self.perishable = perishable
+        # define key of the basin
         self.key = key or str(uuid.uuid4())
         # features this basin provides
         self._features = features
@@ -164,10 +290,14 @@
             self._av_check.start()
 
     def __repr__(self):
+        try:
+            feature_info = len(self.features)
+        except BaseException:
+            feature_info = "unknown"
         options = [
             self.name,
             f"mapped {self.mapping}" if self.mapping != "same" else "",
-            f"features {self._features}" if self.features else "full-featured",
+            f"{feature_info} features",
             f"location {self.location}",
         ]
         opt_str = ", ".join([o for o in options if o])
@@ -220,6 +350,10 @@
     @property
     def ds(self):
         """The :class:`.RTDCBase` instance represented by the basin"""
+        if self.perishable and self.perishable.perished():
+            # We have perished. Ask the PerishableRecord to refresh this
+            # basin so we can access it again.
+            self.perishable.refresh()
         if self._ds is None:
             if not self.is_available():
                 raise BasinNotAvailableError(f"Basin {self} is not available!")
@@ -265,6 +399,7 @@
             "basin_descr": self.description,
             "basin_feats": self.features,
             "basin_map": self.basinmap,
+            "perishable": bool(self.perishable),
         }
 
     def close(self):
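
Note: to illustrate the `PerishableRecord` contract (the expiration check must be cheap, and both callables receive the basin as first argument), here is a self-contained sketch; `DummyBasin`, `has_expired` and `renew` are made-up stand-ins, not dclab API:

import time
from dclab.rtdc_dataset.feat_basin import PerishableRecord

class DummyBasin:
    # stand-in for a Basin; provides only what PerishableRecord touches
    _ds = None
    location = "https://example.com/data.rtdc?expires=0"

def has_expired(basin, time_local_expiration):
    # cheap check; evaluated on every feature access
    return time_local_expiration < time.time()

def renew(basin, extend_by=3600):
    # hypothetical refresh; a real implementation would fetch a fresh
    # presigned URL from the data provider
    basin.location = "https://example.com/data.rtdc?expires=refreshed"

basin = DummyBasin()
record = PerishableRecord(
    basin,
    expiration_func=has_expired,
    expiration_kwargs={"time_local_expiration": time.time() + 3600},
    refresh_func=renew,
    refresh_kwargs={"extend_by": 3600},
)
print(record.perished())  # False for the next hour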
dclab/rtdc_dataset/fmt_dcor/api.py CHANGED
@@ -11,7 +11,9 @@ class DCORAccessError(BaseException):
 
 class APIHandler:
     """Handles the DCOR api with caching for simple queries"""
-    #: these are cached to minimize network usage
+    #: These are cached to minimize network usage
+    #: Note that we are not caching basins, since they may contain
+    #: expiring URLs.
     cache_queries = ["metadata", "size", "feature_list", "valid"]
     #: DCOR API Keys/Tokens in the current session
     api_keys = []
@@ -52,8 +54,36 @@
         if api_key.strip() and api_key not in APIHandler.api_keys:
             APIHandler.api_keys.append(api_key)
 
-    def _get(self, query, feat=None, trace=None, event=None, api_key="",
-             retries=13):
+    def _get(self,
+             query: str,
+             feat: str = None,
+             trace: str = None,
+             event: str = None,
+             api_key: str = "",
+             timeout: float = None,
+             retries: int = 5):
+        """Fetch information via the DCOR API
+
+        Parameters
+        ----------
+        query: str
+            API route
+        feat: str
+            DEPRECATED (use basins instead), adds f"&feature={feat}" to query
+        trace: str
+            DEPRECATED (use basins instead), adds f"&trace={trace}" to query
+        event: str
+            DEPRECATED (use basins instead), adds f"&event={event}" to query
+        api_key: str
+            DCOR API token to use
+        timeout: float
+            Request timeout
+        retries: int
+            Number of retries to fetch the request. For every retry, the
+            timeout is increased by two seconds.
+        """
+        if timeout is None:
+            timeout = 1
         # "version=2" introduced in dclab 0.54.3
         # (supported since ckanext.dc_serve 0.13.2)
         qstr = f"&version={self.dcserv_api_version}&query={query}"
@@ -65,13 +95,13 @@
             qstr += f"&event={event}"
         apicall = self.url + qstr
         fail_reasons = []
-        for _ in range(retries):
+        for ii in range(retries):
             try:
                 # try-except both requests and json conversion
                 req = self.session.get(apicall,
                                        headers={"Authorization": api_key},
                                        verify=self.verify,
-                                       timeout=1,
+                                       timeout=timeout + ii * 2,
                                        )
                 jreq = req.json()
             except requests.urllib3.exceptions.ConnectionError:  # requests
@@ -92,13 +122,45 @@
                 f"Messages: {fail_reasons}")
         return jreq
 
-    def get(self, query, feat=None, trace=None, event=None):
+    def get(self,
+            query: str,
+            feat: str = None,
+            trace: str = None,
+            event: str = None,
+            timeout: float = None,
+            retries: int = 5,
+            ):
+        """Fetch information from DCOR
+
+        Parameters
+        ----------
+        query: str
+            API route
+        feat: str
+            DEPRECATED (use basins instead), adds f"&feature={feat}" to query
+        trace: str
+            DEPRECATED (use basins instead), adds f"&trace={trace}" to query
+        event: str
+            DEPRECATED (use basins instead), adds f"&event={event}" to query
+        timeout: float
+            Request timeout
+        retries: int
+            Number of retries to fetch the request. For every retry, the
+            timeout is increased by two seconds.
+        """
         if query in APIHandler.cache_queries and query in self._cache:
             result = self._cache[query]
         else:
             req = {"error": {"message": "No access to API (api key?)"}}
             for api_key in [self.api_key] + APIHandler.api_keys:
-                req = self._get(query, feat, trace, event, api_key)
+                req = self._get(query=query,
+                                feat=feat,
+                                trace=trace,
+                                event=event,
+                                api_key=api_key,
+                                timeout=timeout,
+                                retries=retries,
+                                )
                 if req["success"]:
                     self.api_key = api_key  # remember working key
                     break
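
Note: the retry loop now widens the timeout on every attempt (previously a fixed one-second timeout with 13 retries). The pattern in isolation, as a sketch rather than the dclab implementation:

import requests

def fetch_json(session, url, api_key="", timeout=1.0, retries=5):
    fail_reasons = []
    for ii in range(retries):
        try:
            # the timeout grows by two seconds per retry
            req = session.get(url,
                              headers={"Authorization": api_key},
                              timeout=timeout + ii * 2)
            return req.json()
        except requests.exceptions.RequestException as exc:
            fail_reasons.append(repr(exc))
    raise ConnectionError(f"could not fetch {url}: {fail_reasons}")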
dclab/rtdc_dataset/fmt_dcor/base.py CHANGED
@@ -1,17 +1,23 @@
 """DCOR client interface"""
+import logging
 import pathlib
 import re
+import time
 
 from ...util import hashobj
 
 from ..config import Configuration
 from ..core import RTDCBase
+from ..feat_basin import PerishableRecord
 
 from . import api
 from .logs import DCORLogs
 from .tables import DCORTables
 
 
+logger = logging.getLogger(__name__)
+
+
 #: Append directories here where dclab should look for certificate bundles
 #: for a specific host. The directory should contain files named after the
 #: hostname, e.g. "dcor.mpl.mpg.de.cert".
@@ -73,6 +79,8 @@ class RTDC_DCOR(RTDCBase):
         super(RTDC_DCOR, self).__init__(*args, **kwargs)
 
         self._hash = None
+        self._cache_basin_dict = None
+        self.cache_basin_dict_time = 600
         self.path = RTDC_DCOR.get_full_url(url, use_ssl, host)
 
         if cert_path is None:
@@ -161,15 +169,106 @@
         new_url = f"{scheme}://{netloc}/{api_path}"
         return new_url
 
-    def basins_get_dicts(self):
-        """Return list of dicts for all basins defined in `self.h5file`"""
+    def _basin_refresh(self, basin):
+        """Refresh the specified basin"""
+        # Retrieve the basin dictionary from DCOR
+        basin_dicts = self.basins_get_dicts()
+        for bn_dict in basin_dicts:
+            if bn_dict.get("name") == basin.name:
+                break
+        else:
+            raise ValueError(f"Basin '{basin.name}' not found in {self}")
+
+        tre = bn_dict["time_request"]
+        ttl = bn_dict["time_expiration"]
+        # remember time relative to time.time, subtract 30s to be on safe side
+        tex = bn_dict["time_local_request"] + (ttl - tre) - 30
+
+        if isinstance(basin.perishable, bool):
+            logger.debug("Initializing basin perishable %s", basin.name)
+            # create a perishable record
+            basin.perishable = PerishableRecord(
+                basin=basin,
+                expiration_func=self._basin_expiration,
+                expiration_kwargs={"time_local_expiration": tex},
+                refresh_func=self._basin_refresh,
+            )
+        else:
+            logger.debug("Refreshing basin perishable %s", basin.name)
+            # only update (this also works with weakref.ProxyType)
+            basin.perishable.expiration_kwargs = {"time_local_expiration": tex}
+
+        if len(bn_dict["urls"]) > 1:
+            logger.warning(f"Basin {basin.name} has multiple URLs. I am not "
+                           f"checking their availability: {bn_dict}")
+        basin.location = bn_dict["urls"][0]
+
+    def _basin_expiration(self, basin, time_local_expiration):
+        """Check whether the basin has perished"""
+        return time_local_expiration < time.time()
+
+    def _basins_get_dicts(self):
         try:
-            basins = self.api.get(query="basins")
+            basin_dicts = self.api.get(query="basins")
+            # Fill in missing timing information
+            for bn_dict in basin_dicts:
+                if (bn_dict.get("format") == "http"
+                        and "perishable" not in bn_dict):
+                    # We are communicating with an older version of
+                    # ckanext-dc_serve. Take a look at the URL and check
+                    # whether we have a perishable (~1 hour) URL or whether
+                    # this is a public resource.
+                    expires_regexp = re.compile(".*expires=([0-9]*)$")
+                    for url in bn_dict.get("urls", []):
+                        if match := expires_regexp.match(url.lower()):
+                            logger.debug("Detected perishable basin: %s",
                                         bn_dict["name"])
+                            bn_dict["perishable"] = True
+                            bn_dict["time_request"] = time.time()
+                            bn_dict["time_expiration"] = int(match.group(1))
+                            # add part of the resource ID to the name
+                            infourl = url.split(bn_dict["name"], 1)[-1]
+                            infourl = infourl.replace("/", "")
+                            bn_dict["name"] += f"-{infourl[:5]}"
+                            break
+                    else:
+                        bn_dict["perishable"] = False
+                # If we have a perishable basin, add the local request time
+                if bn_dict.get("perishable"):
+                    bn_dict["time_local_request"] = time.time()
         except api.DCORAccessError:
             # TODO: Do not catch this exception when all DCOR instances
             #       implement the 'basins' query.
             # This means that the server does not implement the 'basins' query.
-            basins = []
+            basin_dicts = []
+        return basin_dicts
+
+    def basins_get_dicts(self):
+        """Return list of dicts for all basins defined on DCOR
+
+        The return value of this method is cached for 10 minutes
+        (cache time defined in the `cache_basin_dict_time` [s] property).
+        """
+        if (self._cache_basin_dict is None
+                or time.time() > (self._cache_basin_dict[1]
+                                  + self.cache_basin_dict_time)):
+            self._cache_basin_dict = (self._basins_get_dicts(), time.time())
+        return self._cache_basin_dict[0]
+
+    def basins_retrieve(self):
+        """Same as superclass, but add perishable information"""
+        basin_dicts = self.basins_get_dicts()
+        basins = super(RTDC_DCOR, self).basins_retrieve()
+        for bn in basins:
+            for bn_dict in basin_dicts:
+                if bn.name == bn_dict.get("name"):
+                    # Determine whether we have to set a perishable record.
+                    if bn_dict.get("perishable"):
+                        # required for `_basin_refresh` to create a record
+                        bn.perishable = True
+                        # create the actual record
+                        self._basin_refresh(bn)
+                    break
         return basins
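
Note: for servers that do not yet report "perishable", the fallback inspects each URL for a trailing `expires=<unix time>` query parameter. A standalone sketch of that detection; the URL is made up:

import re
import time

expires_regexp = re.compile(".*expires=([0-9]*)$")
url = ("https://objectstore.example.com/bucket/resource.rtdc"
       "?signature=abc&expires=1735689600")
if match := expires_regexp.match(url.lower()):
    remaining = int(match.group(1)) - time.time()
    print(f"presigned URL, expires in {remaining:.0f} s")
else:
    print("no expiration marker; treating the URL as non-perishable")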
dclab/rtdc_dataset/fmt_dcor/logs.py CHANGED
@@ -22,5 +22,5 @@ class DCORLogs:
     @property
     def _logs(self):
         if self._logs_cache is None:
-            self._logs_cache = self.api.get(query="logs")
+            self._logs_cache = self.api.get(query="logs", timeout=5)
         return self._logs_cache
dclab/rtdc_dataset/fmt_dcor/tables.py CHANGED
@@ -27,7 +27,7 @@ class DCORTables:
     @property
     def _tables(self):
         if self._tables_cache is None:
-            table_data = self.api.get(query="tables")
+            table_data = self.api.get(query="tables", timeout=13)
             # assemble the tables
             tables = {}
             for key in table_data:
dclab/rtdc_dataset/fmt_s3.py CHANGED
@@ -42,6 +42,27 @@ S3_ACCESS_KEY_ID = os.environ.get("DCLAB_S3_ACCESS_KEY_ID")
 S3_SECRET_ACCESS_KEY = os.environ.get("DCLAB_S3_SECRET_ACCESS_KEY")
 
 
+@functools.lru_cache(maxsize=1000)
+def get_s3_session_client(access_key_id: str,
+                          secret_access_key: str,
+                          use_ssl: bool,
+                          verify_ssl: bool,
+                          endpoint_url: str
+                          ):
+    botocore_session = botocore.session.get_session()
+    s3_session = boto3.Session(
+        aws_access_key_id=access_key_id,
+        aws_secret_access_key=secret_access_key,
+        botocore_session=botocore_session)
+    s3_client = s3_session.client(
+        service_name='s3',
+        use_ssl=use_ssl,
+        verify=verify_ssl,
+        endpoint_url=endpoint_url,
+    )
+    return botocore_session, s3_session, s3_client
+
+
 class S3File(HTTPFile):
     """Monkeypatched `HTTPFile` to support authenticated access to S3"""
     def __init__(self,
@@ -74,17 +95,15 @@
                 "not specify the full S3 URL or that you forgot to set "
                 "the `S3_ENDPOINT_URL` environment variable.")
         endpoint_url = endpoint_url.strip().rstrip("/")
-        self.botocore_session = botocore.session.get_session()
-        self.s3_session = boto3.Session(
-            aws_access_key_id=access_key_id,
-            aws_secret_access_key=secret_access_key,
-            botocore_session=self.botocore_session)
-        self.s3_client = self.s3_session.client(
-            service_name='s3',
-            use_ssl=use_ssl,
-            verify=verify_ssl,
-            endpoint_url=endpoint_url,
+        self.botocore_session, self.s3_session, self.s3_client = \
+            get_s3_session_client(
+                access_key_id=access_key_id,
+                secret_access_key=secret_access_key,
+                use_ssl=use_ssl,
+                verify_ssl=verify_ssl,
+                endpoint_url=endpoint_url,
             )
+
         # Use a configuration that allows anonymous access
         # https://stackoverflow.com/a/34866092
         if not secret_access_key:
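
Note: moving the boto3 session/client construction into a module-level function decorated with `functools.lru_cache` means that all `S3File` instances sharing credentials and endpoint reuse one client. The mechanism in miniature, with a stand-in constructor instead of boto3:

import functools

@functools.lru_cache(maxsize=1000)
def make_client(endpoint_url: str, access_key_id: str):
    # stand-in for the boto3 setup; runs once per argument combination
    print(f"creating client for {endpoint_url}")
    return object()

c1 = make_client("https://s3.example.com", "KEY")
c2 = make_client("https://s3.example.com", "KEY")
assert c1 is c2  # second call hit the cache; no new client was built

This only works because all arguments are hashable, which is why the cached function takes plain strings and booleans.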
dclab/rtdc_dataset/writer.py CHANGED
@@ -66,8 +66,8 @@ class RTDCWriter:
     compression_kwargs: dict-like
         Dictionary with the keys "compression" and "compression_opts"
         which are passed to :func:`h5py.H5File.create_dataset`. The
-        default is Zstandard compression with the lowest compression
-        level `hdf5plugin.Zstd(clevel=1)`. To disable compression, use
+        default is Zstandard compression with the compression
+        level 5 `hdf5plugin.Zstd(clevel=5)`. To disable compression, use
         `{"compression": None}`.
     compression: str or None
         Compression method used for data storage;
@@ -88,7 +88,7 @@
             # be backwards-compatible
             compression_kwargs = {"compression": compression}
         if compression_kwargs is None:
-            compression_kwargs = hdf5plugin.Zstd(clevel=1)
+            compression_kwargs = hdf5plugin.Zstd(clevel=5)
 
         self.mode = mode
         self.compression_kwargs = compression_kwargs
@@ -209,6 +209,7 @@
                     basin_map: np.ndarray | Tuple[str, np.ndarray] = None,
                     internal_data: Dict | h5py.Group = None,
                     verify: bool = True,
+                    perishable: bool = False,
                     ):
         """Write basin information
 
@@ -249,9 +250,13 @@
             This must be specified when storing internal basins, and it
             must not be specified for any other basin type.
         verify: bool
-            whether to verify the basin before storing it; You might have
+            Whether to verify the basin before storing it; You might have
             set this to False if you would like to write a basin that is
             e.g. temporarily not available
+        perishable: bool
+            Whether the basin is perishable. If this is True, then a
+            warning will be issued, because perishable basins may not be
+            accessed (e.g. time-based URL for private S3 data).
 
         Returns
         -------
@@ -261,6 +266,8 @@
 
         .. versionadded:: 0.58.0
         """
+        if perishable:
+            warnings.warn(f"Storing perishable basin {basin_name}")
         if basin_type == "internal":
             if internal_data is None:
                 raise ValueError(
@@ -381,6 +388,7 @@
             "type": basin_type,
             "features": None if basin_feats is None else sorted(basin_feats),
             "mapping": basin_map_name,
+            "perishable": perishable,
         }
         if basin_type == "file":
             flocs = []
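
Note: the default compression moves from `hdf5plugin.Zstd(clevel=1)` to `hdf5plugin.Zstd(clevel=5)` in both `RTDCWriter` and `Export.hdf5`. Callers preferring the old speed/size trade-off can pass the level explicitly; a sketch with a hypothetical output path:

import hdf5plugin
from dclab import RTDCWriter

# explicitly restore the previous, lightly-compressing default
with RTDCWriter("out.rtdc",
                compression_kwargs=hdf5plugin.Zstd(clevel=1)) as hw:
    hw.store_metadata({"experiment": {"sample": "demo", "run index": 1}})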
{dclab-0.62.17.dist-info → dclab-0.64.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dclab
-Version: 0.62.17
+Version: 0.64.0
 Summary: Library for real-time deformability cytometry (RT-DC)
 Author: Benedikt Hartmann, Eoghan O'Connell, Maik Herbig, Maximilian Schlögel, Nadia Sbaa, Paul Müller, Philipp Rosendahl, Raghava Alajangi
 Maintainer-email: Paul Müller <dev@craban.de>
@@ -14,7 +14,7 @@ Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python :: 3
 Classifier: Topic :: Scientific/Engineering :: Visualization
 Classifier: Intended Audience :: Science/Research
-Requires-Python: <4,>=3.8
+Requires-Python: <4,>=3.9
 Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: h5py<4,>=3.0.0
{dclab-0.62.17.dist-info → dclab-0.64.0.dist-info}/RECORD CHANGED
@@ -1,9 +1,9 @@
 dclab/__init__.py,sha256=wyJWhElQRPcq09vUqUnuquTU_KHgHxv6wQxuxQ988Iw,1583
-dclab/_version.py,sha256=GEa7QH3NrBgvyLi3LNdBdNGUvveY9cblWLa2mkR4bCI,515
+dclab/_version.py,sha256=zVb-mNfv7Q80VP6gLIonMXNXR1BAme5zhf4Ww25kC90,513
 dclab/cached.py,sha256=eWTYBiI-HQM7JuPH-oxa5LLnhAX32GpRwlYg2kQ3sTA,2917
-dclab/downsampling.cpython-39-x86_64-linux-gnu.so,sha256=2dz66rF0V8H9-kX5hkOZjrfNw1hx60NtnJq0pTtXjyo,1489200
+dclab/downsampling.cpython-39-x86_64-linux-gnu.so,sha256=a8dZlnaETZhboAi2V_jmqv8jTjtPaJMSXfi1XLiRPYI,1480784
 dclab/downsampling.pyx,sha256=OK7zbgGLl5gVyoU8ZBHo9EWwb8C9ChavmLNEvQvC9T0,7258
-dclab/http_utils.py,sha256=YtZHEwB-BBBo2fCvwhlJvlnWvfWFMHclqol3OIJ7atM,10910
+dclab/http_utils.py,sha256=XHicbHQts5LY3zSNmYqNgAZpKTktotEiwQgJ8d2sBlk,10912
 dclab/kde_contours.py,sha256=UlU64lrzMQUZH11oZndW7xf7NFCzwP3FcVujwuqXDCI,278
 dclab/kde_methods.py,sha256=f0-zDN7ETintvGB3gSzxwgBb53YtT9jZtzI70EAX50g,365
 dclab/polygon_filter.py,sha256=qexmo-rXe06CUPZhN6EMJy4y4B5gXZeqejdvIB2arOE,13480
@@ -35,14 +35,14 @@ dclab/external/packaging/version.py,sha256=9MLL6_EYHvGA1yCGndwL5ZmmDA_wqQsW15GyK
 dclab/external/skimage/LICENSE,sha256=ivsSBvn3c0R9mOctWRRdza7C7wdZSRYgCVxlVqUdlB8,1452
 dclab/external/skimage/__init__.py,sha256=-B2QUKHAFzQuBWuuKvPDC5JIl0Zb-x3OGmbwPaE9VwQ,72
 dclab/external/skimage/_find_contours.py,sha256=16v5eeTZBmevG8SSuXtJ6yUpVPhwfSmtc8pDD0nuuOU,9340
-dclab/external/skimage/_find_contours_cy.cpython-39-x86_64-linux-gnu.so,sha256=Jcy6cWsfskVrHxOOeWSuOSX8qjK9QHF3sH5yMRzig4g,1098048
+dclab/external/skimage/_find_contours_cy.cpython-39-x86_64-linux-gnu.so,sha256=B2frqGkOmlrASITtPC3yHMGiOfQ3Q4ddrVmbgrYoZis,1090792
 dclab/external/skimage/_find_contours_cy.pyx,sha256=pZJOBhMHzYEMkcz4WQVyjn7jDNrdjCfet47FU1hRAxk,7161
-dclab/external/skimage/_pnpoly.cpython-39-x86_64-linux-gnu.so,sha256=onU-ie9GEo-qpgi2k6M1pbdyFkSpuwtkZnP7gUSO14c,1213424
+dclab/external/skimage/_pnpoly.cpython-39-x86_64-linux-gnu.so,sha256=QJg9uS9gap-2wiTrSnZCOstM9ukYfhCVysdEJ3doOgw,1191776
 dclab/external/skimage/_pnpoly.pyx,sha256=Qdn6xPazDschBqbr46DzB75MB2MnqvdnoTSBMK7kUGE,2504
 dclab/external/skimage/measure.py,sha256=y1idCqD9TUxp3-QnOiWR_d674OKaeqBJ4MN2-gVP6ro,247
 dclab/external/skimage/pnpoly.py,sha256=r8hFNiTz5XlUoNZjosqA0iyv1FPn0l7ewbplgFgkdaw,1347
 dclab/external/skimage/_shared/__init__.py,sha256=2sHZwTtJSlMTa3Q2YSvQW7jrPLMUSqDJQa-ROe5zfcw,37
-dclab/external/skimage/_shared/geometry.cpython-39-x86_64-linux-gnu.so,sha256=80XHcBsxmcSLzufJF2w7vbvgb393tjvPKehmY6kRR9I,73440
+dclab/external/skimage/_shared/geometry.cpython-39-x86_64-linux-gnu.so,sha256=8BUxEv5oF5nWa8uxzvSZDBLWz_uXpIp3vrHGXXeLmbk,64944
 dclab/external/skimage/_shared/geometry.pxd,sha256=kRsu9ifv_rL3kbRIgSLf86p0hn2oTMp6s013lZ9bBZM,346
 dclab/external/skimage/_shared/geometry.pyx,sha256=miCHUh6mBDbRRIoaF_0xAER1MRzsCAzFdlYQZhV7RmE,1667
 dclab/external/statsmodels/LICENSE,sha256=JCyTeA3bPAyFsOpDoSVZjoui7Lu1XTrcAuf0eClKvV0,1637
@@ -76,9 +76,9 @@ dclab/isoelastics/iso_LE-2D-FEM-19-area_um-deform.txt,sha256=lcTjUUnIwj_bVBrG2T2
 dclab/isoelastics/iso_LE-2D-FEM-19-volume-deform.txt,sha256=vTcazOlOXo3BQ0NQtGB_IdHKA0neOLXZ_d3JuMU--RE,83358
 dclab/isoelastics/iso_LE-2D-ana-18-area_um-deform.txt,sha256=KD2RkhCfkrna20pLJ3UzNZZapMkhQydMYz0iKdMtRRE,46805
 dclab/kde/__init__.py,sha256=_WSLPMfxE2su6tmO5mJxUE_9ON16-pqQUQCUlzRtyKI,55
-dclab/kde/base.py,sha256=i6bNYYeQSFlq03Z5CZjhBE-V3xpY6YLWivL3guscsnE,7683
+dclab/kde/base.py,sha256=KuV_9_AJ4Sn14C4WAH4eF63URAVYCnaFOcLgsVCaAjo,12961
 dclab/kde/contours.py,sha256=WoRqBj_xK-23FZjtaYly7E2Q8sGZ16q2ILq-DmrlmC8,6742
-dclab/kde/methods.py,sha256=SYlAjoST66hEZnRmsdZ6izMmgfebxQxTfBR5PHhzDkE,9208
+dclab/kde/methods.py,sha256=8g4lYUKYqt2pdA9efHVRBDCUUzmePmWPp6rljtJ0XD8,9438
 dclab/lme4/__init__.py,sha256=5WPFMTK-Yia3NJuwZEEBQ3fCyW3DiFgpZFrAwU33TV4,272
 dclab/lme4/lme4_template.R,sha256=CEXQIquvYCla9dCvRYgiBemI6fiVgAKnJTetJA2LAtk,2570
 dclab/lme4/rsetup.py,sha256=kH9VFtcK83ZaF9jvh1n5kcmGmPLLsmCPia_ElEHBLes,5890
@@ -87,17 +87,17 @@ dclab/rtdc_dataset/__init__.py,sha256=MUHSGVQJ4Zc0IyU2lf01dpDWyOyNveHip-UjSkmPNv
 dclab/rtdc_dataset/check.py,sha256=lJNaz4QTe2WNlxik6zSohRHTiAYuP_bKOzSDjPGTUS0,35006
 dclab/rtdc_dataset/config.py,sha256=MvBteFya3R6Ch3U6UgTakCsJoBgVykTxS_Z25STWPHU,17432
 dclab/rtdc_dataset/copier.py,sha256=-2ISiOs4ytxN_ttXQGhaepuD2Ppy80G9UlDSZVyEoOU,14175
-dclab/rtdc_dataset/core.py,sha256=EjNWk9SV-2xBTRtf34XosLCOS164vGWKP5dKKLSOSq4,34441
-dclab/rtdc_dataset/export.py,sha256=RPnWNDAPW1m3vPcStjqIX-YnxDOKyWLtQ1HrA38S3Uo,30384
-dclab/rtdc_dataset/feat_basin.py,sha256=ViKdvJcwFM8joysnrBYdZbA5t_wZix-6xn_FsvzpYsQ,21072
+dclab/rtdc_dataset/core.py,sha256=p1Wy9Dq1Ny4i-cfvgPfCZov_lQAhGz0--x59j-g4mTs,34753
+dclab/rtdc_dataset/export.py,sha256=Ukmdz-Mm9iei6vhfp1lh7oNQiAbXvA2o3tRx1XlBWCM,33416
+dclab/rtdc_dataset/feat_basin.py,sha256=i1J6iKQQEJXdi3u-TB9y_OdxbkFPBK6_G2jeaGcBsLY,26172
 dclab/rtdc_dataset/feat_temp.py,sha256=XbDIS1iUUkRH0Zp9uVlwvK_untJ7hkOnKshK1Drsnt8,3694
 dclab/rtdc_dataset/filter.py,sha256=AFPUBzOIi3pqXgUdMQ5CIi9ZeGOKC71rfSZKLMLgtog,10023
 dclab/rtdc_dataset/fmt_dict.py,sha256=gumVQOiVVDFUKow_483PY7cxInqo-NiBBnBhIU8s4lg,3009
 dclab/rtdc_dataset/fmt_http.py,sha256=vXVxRLXZp2_V1v3xk4lu4VUHYXfNHJdsRkVt3trC1RU,3374
-dclab/rtdc_dataset/fmt_s3.py,sha256=FVw0q3CwiPwDKmz37EsjK2T5CLr4MsH3pvscu-gToVM,11278
+dclab/rtdc_dataset/fmt_s3.py,sha256=bU3V_WGyqJhxPCH80X_nlNqq-jXcgoZKv_aUBIqwaL8,11877
 dclab/rtdc_dataset/load.py,sha256=5_xGw2P8Mjs0gW-vGr2Kn28j6Qv3BvvMvguMibC_mM4,2761
 dclab/rtdc_dataset/meta_table.py,sha256=ucqBNrgI6rDAuQFuMRckY8lp1LpnYAoRgEsLObWTJCE,648
-dclab/rtdc_dataset/writer.py,sha256=jc6ADyxGoujXpoXu1vF2nfZjGFMaO5LbRmoYJZ83JVo,41418
+dclab/rtdc_dataset/writer.py,sha256=BqP4QSzHxCSxp9z7_cJF2v04taO3ZXE3Wg6zJY6fMao,41819
 dclab/rtdc_dataset/feat_anc_core/__init__.py,sha256=hEWMqg2rmbxW86Fe-dkTD4b0Zmp-dJe6gPsMjspPGXA,464
 dclab/rtdc_dataset/feat_anc_core/af_basic.py,sha256=15UW0wp1XG0o9QHxcVmn48dPZUdRgGezaC7ZidS6EhA,2193
 dclab/rtdc_dataset/feat_anc_core/af_emodulus.py,sha256=Pl1kJSqAx15xBeipbw7fv8QbsvMjzY6gd1MC7Atm5kc,6670
@@ -110,11 +110,11 @@ dclab/rtdc_dataset/feat_anc_plugin/__init__.py,sha256=Em8nKxzex6M46Q86pRoSdrzb02
 dclab/rtdc_dataset/feat_anc_plugin/plugin_feature.py,sha256=ED7vAtHgMXoanbeoUdklDAocj0Pq_cpckmCJ-YWqwr8,12381
 dclab/rtdc_dataset/fmt_dcor/__init__.py,sha256=WjO1uM_Vlof15Y7HkhkV5Xv75q9TDIdOBIuS_I38qps,210
 dclab/rtdc_dataset/fmt_dcor/access_token.py,sha256=jotLQay138RUlv8wbdF2ishRnyE9N0KwGGBlbCL0wRI,2028
-dclab/rtdc_dataset/fmt_dcor/api.py,sha256=COPRnPfPBcxbQGxHFEbGxp2CjK-Mgnt3cIu20-Zz04M,4245
-dclab/rtdc_dataset/fmt_dcor/base.py,sha256=wD127W5LvvhkUy8SvFVVwAR6EEYtzgoWJ4booh45rfA,6588
+dclab/rtdc_dataset/fmt_dcor/api.py,sha256=IhmNpEdVSGHdJUepCkmuyTVOp3fNn1WASPVohAzwhu8,6274
+dclab/rtdc_dataset/fmt_dcor/base.py,sha256=cFiZcWG43jOGTY3oNl-E_vdzbm5QZU1CEB7ucrLZP_U,11167
 dclab/rtdc_dataset/fmt_dcor/basin.py,sha256=tQZ4GumqURjS3eppRrSyUq1zBPD0y_8rwznMRDXiDUs,2526
-dclab/rtdc_dataset/fmt_dcor/logs.py,sha256=1JsMr_4r5j8rkfrrUsiN42_l92GcvDjapYxopZKimnw,583
-dclab/rtdc_dataset/fmt_dcor/tables.py,sha256=NaVEwLKmOg7Mz5iAMe2S8C4xRVC_YO3zeT7g5EbQE1M,1682
+dclab/rtdc_dataset/fmt_dcor/logs.py,sha256=FHaDGjh4wMd18S2FFr6IVdd23l21S6s3fwZnFbPG83E,594
+dclab/rtdc_dataset/fmt_dcor/tables.py,sha256=a4gMIjH0TgFdz36l_F-xriHTT4LMI64ur-atHyvAp80,1694
 dclab/rtdc_dataset/fmt_hdf5/__init__.py,sha256=yWLYK-Fq0EYnp2eYfl1Ze02RBMOWg-iALJWs4dFSxxY,270
 dclab/rtdc_dataset/fmt_hdf5/base.py,sha256=_PgmDq2K7RGCuhV9J4YZwg9noW1hi2w14ZP8ooRR8Lw,6391
 dclab/rtdc_dataset/fmt_hdf5/basin.py,sha256=mJZR92Qoa71EwDVDYAP9KtOcjvRyjtA2wO1DkCBfBQc,792
@@ -134,9 +134,9 @@ dclab/rtdc_dataset/fmt_tdms/event_mask.py,sha256=eZiDHAGG3MCVckEMHsV-YBbL-pETVLo
 dclab/rtdc_dataset/fmt_tdms/event_trace.py,sha256=Vkym0QKSw2mq1XZl5n8wDkgHXmaZwQGiMAV5AuRSJkE,5215
 dclab/rtdc_dataset/fmt_tdms/exc.py,sha256=WzrMqnyrzp8gsT8Pf7JKqGGv43ewx7d_qgtirURppRI,813
 dclab/rtdc_dataset/fmt_tdms/naming.py,sha256=biI9l1EO6BuSYgwZG0deacj4i1fMHQcW78AKXEcm5Wc,5373
-dclab-0.62.17.dist-info/METADATA,sha256=sxoMpN4ygAI9x12l_IsEKUqi32wGJ3o2Tg2OKEx8cKQ,4756
-dclab-0.62.17.dist-info/WHEEL,sha256=dygxghiRV9JTYZ0KWRTlQWrNGgRs9bzKS66-L8fT8uI,110
-dclab-0.62.17.dist-info/entry_points.txt,sha256=eOpjgznu-eW-9utUpLU-77O5098YyUEgGF3ksGMdtec,273
-dclab-0.62.17.dist-info/top_level.txt,sha256=irvwZMgs1edY1Zj60ZFk7Almb9Zhk4k6E6aC4YPFnnM,6
-dclab-0.62.17.dist-info/RECORD,,
-dclab-0.62.17.dist-info/licenses/LICENSE,sha256=gLDaVZWRrlnLdyfOrR0qfWjLbOVcjvoJ-kCLUK0fyXA,15360
+dclab-0.64.0.dist-info/METADATA,sha256=zfrjZaLGPiE_ZPYkVtoUGvGI-CPkjgY-eiKJshd-3bo,4755
+dclab-0.64.0.dist-info/WHEEL,sha256=RCs-IytiafpWtat_ywWwz8oKW5vOEqKNayxVvj2QscE,110
+dclab-0.64.0.dist-info/entry_points.txt,sha256=eOpjgznu-eW-9utUpLU-77O5098YyUEgGF3ksGMdtec,273
+dclab-0.64.0.dist-info/top_level.txt,sha256=irvwZMgs1edY1Zj60ZFk7Almb9Zhk4k6E6aC4YPFnnM,6
+dclab-0.64.0.dist-info/RECORD,,
+dclab-0.64.0.dist-info/licenses/LICENSE,sha256=gLDaVZWRrlnLdyfOrR0qfWjLbOVcjvoJ-kCLUK0fyXA,15360
{dclab-0.62.17.dist-info → dclab-0.64.0.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.0)
+Generator: setuptools (80.7.1)
 Root-Is-Purelib: false
 Tag: cp39-cp39-musllinux_1_2_x86_64
 