tobac 1.6.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. tobac/__init__.py +112 -0
  2. tobac/analysis/__init__.py +31 -0
  3. tobac/analysis/cell_analysis.py +628 -0
  4. tobac/analysis/feature_analysis.py +212 -0
  5. tobac/analysis/spatial.py +619 -0
  6. tobac/centerofgravity.py +226 -0
  7. tobac/feature_detection.py +1758 -0
  8. tobac/merge_split.py +324 -0
  9. tobac/plotting.py +2321 -0
  10. tobac/segmentation/__init__.py +10 -0
  11. tobac/segmentation/watershed_segmentation.py +1316 -0
  12. tobac/testing.py +1179 -0
  13. tobac/tests/segmentation_tests/test_iris_xarray_segmentation.py +0 -0
  14. tobac/tests/segmentation_tests/test_segmentation.py +1183 -0
  15. tobac/tests/segmentation_tests/test_segmentation_time_pad.py +104 -0
  16. tobac/tests/test_analysis_spatial.py +1109 -0
  17. tobac/tests/test_convert.py +265 -0
  18. tobac/tests/test_datetime.py +216 -0
  19. tobac/tests/test_decorators.py +148 -0
  20. tobac/tests/test_feature_detection.py +1321 -0
  21. tobac/tests/test_generators.py +273 -0
  22. tobac/tests/test_import.py +24 -0
  23. tobac/tests/test_iris_xarray_match_utils.py +244 -0
  24. tobac/tests/test_merge_split.py +351 -0
  25. tobac/tests/test_pbc_utils.py +497 -0
  26. tobac/tests/test_sample_data.py +197 -0
  27. tobac/tests/test_testing.py +747 -0
  28. tobac/tests/test_tracking.py +714 -0
  29. tobac/tests/test_utils.py +650 -0
  30. tobac/tests/test_utils_bulk_statistics.py +789 -0
  31. tobac/tests/test_utils_coordinates.py +328 -0
  32. tobac/tests/test_utils_internal.py +97 -0
  33. tobac/tests/test_xarray_utils.py +232 -0
  34. tobac/tracking.py +613 -0
  35. tobac/utils/__init__.py +27 -0
  36. tobac/utils/bulk_statistics.py +360 -0
  37. tobac/utils/datetime.py +184 -0
  38. tobac/utils/decorators.py +540 -0
  39. tobac/utils/general.py +753 -0
  40. tobac/utils/generators.py +87 -0
  41. tobac/utils/internal/__init__.py +2 -0
  42. tobac/utils/internal/coordinates.py +430 -0
  43. tobac/utils/internal/iris_utils.py +462 -0
  44. tobac/utils/internal/label_props.py +82 -0
  45. tobac/utils/internal/xarray_utils.py +439 -0
  46. tobac/utils/mask.py +364 -0
  47. tobac/utils/periodic_boundaries.py +419 -0
  48. tobac/wrapper.py +244 -0
  49. tobac-1.6.2.dist-info/METADATA +154 -0
  50. tobac-1.6.2.dist-info/RECORD +53 -0
  51. tobac-1.6.2.dist-info/WHEEL +5 -0
  52. tobac-1.6.2.dist-info/licenses/LICENSE +29 -0
  53. tobac-1.6.2.dist-info/top_level.txt +1 -0
tobac/tracking.py ADDED
@@ -0,0 +1,613 @@
1
+ """Provide tracking methods.
2
+
3
+ The individual features and associated area/volumes identified in
4
+ each timestep have to be linked into trajectories to analyse
5
+ the time evolution of their properties for a better understanding of
6
+ the underlying physical processes.
7
+ The implementations are structured in a way that allows for the future
8
+ addition of more complex tracking methods recording a more complex
9
+ network of relationships between features at different points in
10
+ time.
11
+
12
+ References
13
+ ----------
14
+ .. Heikenfeld, M., Marinescu, P. J., Christensen, M.,
15
+ Watson-Parris, D., Senf, F., van den Heever, S. C.
16
+ & Stier, P. (2019). tobac 1.2: towards a flexible
17
+ framework for tracking and analysis of clouds in
18
+ diverse datasets. Geoscientific Model Development,
19
+ 12(11), 4551-4570.
20
+ """
21
+
22
+ from __future__ import annotations
23
+ import logging
24
+ import numpy as np
25
+ import pandas as pd
26
+ import warnings
27
+ import math
28
+ from . import utils as tb_utils
29
+ from .utils import periodic_boundaries as pbc_utils
30
+ from .utils import internal as internal_utils
31
+
32
+ from packaging import version as pkgvsn
33
+ import trackpy as tp
34
+ import copy
35
+
36
+
37
def linking_trackpy(
    features,
    field_in,
    dt,
    dxy,
    dz=None,
    v_max=None,
    d_max=None,
    d_min=None,
    subnetwork_size=None,
    memory=0,
    stubs=1,
    time_cell_min=None,
    order=1,
    extrapolate=0,
    method_linking="random",
    adaptive_step=None,
    adaptive_stop=None,
    cell_number_start=1,
    cell_number_unassigned=-1,
    vertical_coord="auto",
    min_h1=None,
    max_h1=None,
    min_h2=None,
    max_h2=None,
    PBC_flag="none",
):
    """Perform linking of features in trajectories.

    The linking determines which of the features detected in a specific
    timestep is most likely identical to an existing feature in the
    previous timestep. For each existing feature, the movement within
    a time step is extrapolated based on the velocities in a number of
    previous time steps. The algorithm then breaks the search process
    down to a few candidate features by restricting the search to a
    circular search region centered around the predicted position of
    the feature in the next time step. For newly initialized trajectories,
    where no velocity from previous time steps is available, the
    algorithm resorts to the average velocity of the nearest tracked
    objects. v_max and d_min are given as physical quantities and then
    converted into pixel-based values used in trackpy. This allows for
    tracking that is controlled by physically-based parameters that are
    independent of the temporal and spatial resolution of the input
    data. The algorithm creates a continuous track for the feature
    that is the most probable based on the previous cell path.

    Parameters
    ----------
    features : pandas.DataFrame
        Detected features to be linked.

    field_in : None
        Input field. Not currently used; can be set to `None`.

    dt : float
        Time resolution of tracked features in seconds.

    dxy : float
        Horizontal grid spacing of the input data in meters.

    dz : float
        Constant vertical grid spacing (meters), optional. If not specified
        and the input is 3D, this function requires that `vertical_coord` is available
        in the `features` input. If you specify a value here, this function assumes
        that it is the constant z spacing between points, even if ```vertical_coord```
        is specified.

    d_max : float, optional
        Maximum search range in meters. Only one of `d_max`, `d_min`, or `v_max` can be set.
        Default is None.

    d_min : float, optional
        Deprecated. Only one of `d_max`, `d_min`, or `v_max` can be set.
        Default is None.

    subnetwork_size : int, optional
        Maximum size of subnetwork for linking. This parameter should be
        adjusted when using adaptive search. Usually a lower value is desired
        in that case. For a more in depth explanation have a look
        `here <https://soft-matter.github.io/trackpy/v0.5.0/tutorial/adaptive-search.html>`_
        If None, 30 is used for regular search and 15 for adaptive search.
        Default is None.

    v_max : float, optional
        Speed at which features are allowed to move in meters per second.
        Only one of `d_max`, `d_min`, or `v_max` can be set.
        Default is None.

    memory : int, optional
        Number of output timesteps features allowed to vanish for to
        be still considered tracked. Default is 0.
        .. warning :: This parameter should be used with caution, as it
                     can lead to erroneous trajectory linking,
                     especially for data with low time resolution.

    stubs : int, optional
        Minimum number of timesteps of a tracked cell to be reported
        Default is 1

    time_cell_min : float, optional
        Minimum length in time that a cell must be tracked for to be considered a
        valid cell in seconds.
        Default is None.

    order : int, optional
        Order of polynomial used to extrapolate trajectory into gaps and
        at start and end point.
        Default is 1.

    extrapolate : int, optional
        Number of timesteps to extrapolate trajectories. Currently unused.
        Default is 0.

    method_linking : {'random', 'predict'}, optional
        Flag choosing method used for trajectory linking.
        Default is 'random', although we typically encourage users to use 'predict'.

    adaptive_step : float, optional
        Reduce search range by multiplying it by this factor. Needs to be
        used in combination with adaptive_stop. Default is None.

    adaptive_stop : float, optional
        If not None, when encountering an oversize subnet, retry by progressively
        reducing search_range by multiplying with adaptive_step until the subnet
        is solvable. If search_range becomes <= adaptive_stop, give up and raise
        a SubnetOversizeException. Needs to be used in combination with
        adaptive_step. Default is None.

    cell_number_start : int, optional
        Cell number for first tracked cell.
        Default is 1

    cell_number_unassigned: int
        Number to set the unassigned/non-tracked cells to. Note that if you set this
        to `np.nan`, the data type of 'cell' will change to float.
        Default is -1

    vertical_coord: str
        Name of the vertical coordinate. The vertical coordinate used
        must be meters. If None, tries to auto-detect.
        It looks for the coordinate or the dimension name corresponding
        to the string. To use `dz`, set this to `None`.

    min_h1: int
        Minimum hdim_1 value, required when PBC_flag is 'hdim_1' or 'both'

    max_h1: int
        Maximum hdim_1 value, required when PBC_flag is 'hdim_1' or 'both'

    min_h2: int
        Minimum hdim_2 value, required when PBC_flag is 'hdim_2' or 'both'

    max_h2: int
        Maximum hdim_2 value, required when PBC_flag is 'hdim_2' or 'both'

    PBC_flag : str('none', 'hdim_1', 'hdim_2', 'both')
        Sets whether to use periodic boundaries, and if so in which directions.
        'none' means that we do not have periodic boundaries
        'hdim_1' means that we are periodic along hdim1
        'hdim_2' means that we are periodic along hdim2
        'both' means that we are periodic along both horizontal dimensions

    Returns
    -------
    trajectories_final : pandas.DataFrame
        Dataframe of the linked features, containing the variable 'cell',
        with integers indicating the affiliation of a feature to a specific
        track, and the variable 'time_cell' with the time the cell has
        already existed.

    Raises
    ------
    ValueError
        If method_linking is neither 'random' nor 'predict'.
    """

    # extrapolate is accepted for API compatibility but not implemented yet
    if extrapolate != 0:
        raise NotImplementedError(
            "Extrapolation is not yet implemented. Set this parameter to 0 to continue."
        )

    # from trackpy import link_df
    # from trackpy import filter_stubs
    # from .utils import add_coordinates

    # exactly one of v_max / d_max / d_min must be given to define the search range
    if (v_max is None) and (d_min is None) and (d_max is None):
        raise ValueError(
            "Neither d_max nor v_max has been provided. Either one of these arguments must be specified."
        )

    # calculate search range (in pixels) based on timestep and grid spacing
    if v_max is not None:
        search_range = dt * v_max / dxy

    # calculate search range based on maximum displacement and grid spacing
    if d_max is not None:
        if v_max is not None:
            raise ValueError(
                "Multiple parameter inputs for v_max, d_max or d_min have been provided. Only use one of these parameters as they supercede each other leading to unexpected behaviour"
            )
        search_range = d_max / dxy

    # calculate search range based on the deprecated d_min parameter
    if d_min is not None:
        if (v_max is not None) or (d_max is not None):
            raise ValueError(
                "Multiple parameter inputs for v_max, d_max or d_min have been provided. Only use one of these parameters as they supercede each other leading to unexpected behaviour"
            )
        search_range = d_min / dxy
        warnings.warn(
            "d_min parameter will be deprecated in a future version of tobac. Please use d_max instead",
            FutureWarning,
        )
    # Check if we are 3D.
    if "vdim" in features:
        is_3D = True
        # dz and vertical_coord are mutually exclusive ways to define vertical spacing
        if dz is not None and vertical_coord is not None:
            raise ValueError(
                "dz and vertical_coord both set, vertical"
                " spacing is ambiguous. Set one to None."
            )
        if dz is None and vertical_coord is None:
            raise ValueError(
                "Neither dz nor vertical_coord are set. One" " must be set."
            )
        if vertical_coord is not None:
            # resolve the actual vertical coordinate column name in the dataframe
            found_vertical_coord = internal_utils.find_dataframe_vertical_coord(
                variable_dataframe=features, vertical_coord=vertical_coord
            )
    else:
        is_3D = False

    # make sure that we have min and max for h1 and h2 if we are PBC
    if PBC_flag in ["hdim_1", "both"] and (min_h1 is None or max_h1 is None):
        raise ValueError("For PBC tracking, must set min and max coordinates.")

    if PBC_flag in ["hdim_2", "both"] and (min_h2 is None or max_h2 is None):
        raise ValueError("For PBC tracking, must set min and max coordinates.")

    # in case of adaptive search, check whether both parameters are specified
    if adaptive_stop is not None:
        if adaptive_step is None:
            raise ValueError(
                "Adaptive search requires values for adaptive_step and adaptive_stop. Please specify adaptive_step."
            )

    if adaptive_step is not None:
        if adaptive_stop is None:
            raise ValueError(
                "Adaptive search requires values for adaptive_step and adaptive_stop. Please specify adaptive_stop."
            )

    # translate a minimum track duration into a minimum number of timesteps
    if time_cell_min:
        stubs = np.floor(time_cell_min / dt) + 1

    logging.debug("stubs: " + str(stubs))

    logging.debug("start linking features into trajectories")

    # If subnetwork size given, set maximum subnet size
    if subnetwork_size is not None:
        # Choose the right parameter depending on the use of adaptive search,
        # saving the previously set value so it can be restored afterwards
        if adaptive_step is None and adaptive_stop is None:
            size_cache = tp.linking.Linker.MAX_SUB_NET_SIZE
            tp.linking.Linker.MAX_SUB_NET_SIZE = subnetwork_size
        else:
            size_cache = tp.linking.Linker.MAX_SUB_NET_SIZE_ADAPTIVE
            tp.linking.Linker.MAX_SUB_NET_SIZE_ADAPTIVE = subnetwork_size

    # deep copy to preserve features field:
    features_linking = copy.deepcopy(features)

    # check if we are 3D or not
    if is_3D:
        # If we are 3D, we need to convert the vertical
        # coordinates so that 1 unit is equal to dxy.

        if dz is not None:
            features_linking["vdim_adj"] = features_linking["vdim"] * dz / dxy
        else:
            features_linking["vdim_adj"] = features_linking[found_vertical_coord] / dxy

        pos_columns_tp = ["vdim_adj", "hdim_1", "hdim_2"]

    else:
        pos_columns_tp = ["hdim_1", "hdim_2"]

    # Check if we have PBCs.
    if PBC_flag in ["hdim_1", "hdim_2", "both"]:
        # Per the trackpy docs, to specify a custom distance function
        # which we need for PBCs, neighbor_strategy must be 'BTree'.
        # I think this shouldn't change results, but it will degrade performance.
        neighbor_strategy = "BTree"
        dist_func = pbc_utils.build_distance_function(
            min_h1, max_h1, min_h2, max_h2, PBC_flag, is_3D
        )

    else:
        neighbor_strategy = "KDTree"
        dist_func = None

    if method_linking == "random":
        # link features into trajectories:
        trajectories_unfiltered = tp.link(
            features_linking,
            search_range=search_range,
            memory=memory,
            t_column="frame",
            pos_columns=pos_columns_tp,
            adaptive_step=adaptive_step,
            adaptive_stop=adaptive_stop,
            neighbor_strategy=neighbor_strategy,
            link_strategy="auto",
            dist_func=dist_func,
        )
    elif method_linking == "predict":
        if is_3D and pkgvsn.parse(tp.__version__) < pkgvsn.parse("0.6.0"):
            raise ValueError(
                "3D Predictive Tracking Only Supported with trackpy versions newer than 0.6.0."
            )

        # avoid setting pos_columns by renaming to default values to avoid trackpy bug
        features_linking.rename(
            columns={
                "y": "__temp_y_coord",
                "x": "__temp_x_coord",
                "z": "__temp_z_coord",
            },
            inplace=True,
        )

        features_linking.rename(
            columns={"hdim_1": "y", "hdim_2": "x", "vdim_adj": "z"}, inplace=True
        )

        # generate list of features as input for df_link_iter to avoid bug in df_link
        features_linking_list = [
            frame for i, frame in features_linking.groupby("frame", sort=True)
        ]

        pred = tp.predict.NearestVelocityPredict(span=1)
        trajectories_unfiltered = pred.link_df_iter(
            features_linking_list,
            search_range=search_range,
            memory=memory,
            # pos_columns=["hdim_1", "hdim_2"], # not working atm
            t_column="frame",
            neighbor_strategy=neighbor_strategy,
            link_strategy="auto",
            adaptive_step=adaptive_step,
            adaptive_stop=adaptive_stop,
            dist_func=dist_func,
            # copy_features=False, diagnostics=False,
            # hash_size=None, box_size=None, verify_integrity=True,
            # retain_index=False
        )
        # recreate a single dataframe from the list

        trajectories_unfiltered = pd.concat(trajectories_unfiltered)

        # change the column names back
        trajectories_unfiltered.rename(
            columns={"y": "hdim_1", "x": "hdim_2", "z": "vdim_adj"}, inplace=True
        )
        trajectories_unfiltered.rename(
            columns={
                "__temp_y_coord": "y",
                "__temp_x_coord": "x",
                "__temp_z_coord": "z",
            },
            inplace=True,
        )

    else:
        raise ValueError("method_linking unknown")

    # Reset trackpy parameters to previously set values
    if subnetwork_size is not None:
        if adaptive_step is None and adaptive_stop is None:
            tp.linking.Linker.MAX_SUB_NET_SIZE = size_cache
        else:
            tp.linking.Linker.MAX_SUB_NET_SIZE_ADAPTIVE = size_cache

    # Filter trajectories to exclude short trajectories that are likely to be spurious
    # trajectories_filtered = filter_stubs(trajectories_unfiltered,threshold=stubs)
    # trajectories_filtered=trajectories_filtered.reset_index(drop=True)

    # clean up our temporary columns
    if is_3D:
        trajectories_unfiltered = trajectories_unfiltered.drop("vdim_adj", axis=1)

    # Reset particle numbers from the arbitrary numbers at the end of the feature detection and linking to consecutive cell numbers
    # keep 'particle' for reference to the feature detection step.
    trajectories_unfiltered["cell"] = None
    particle_num_to_cell_num = dict()
    for i_particle, particle in enumerate(
        pd.Series.unique(trajectories_unfiltered["particle"])
    ):
        cell = int(i_particle + cell_number_start)
        particle_num_to_cell_num[particle] = int(cell)
    remap_particle_to_cell_vec = np.vectorize(remap_particle_to_cell_nv)
    trajectories_unfiltered["cell"] = remap_particle_to_cell_vec(
        particle_num_to_cell_num, trajectories_unfiltered["particle"]
    )
    trajectories_unfiltered["cell"] = trajectories_unfiltered["cell"].astype(int)
    trajectories_unfiltered.drop(columns=["particle"], inplace=True)

    # mark cells shorter than the stub threshold as unassigned
    trajectories_bycell = trajectories_unfiltered.groupby("cell")
    stub_cell_nums = list()
    for cell, trajectories_cell in trajectories_bycell:
        # logging.debug("cell: "+str(cell))
        # logging.debug("feature: "+str(trajectories_cell['feature'].values))
        # logging.debug("trajectories_cell.shape[0]: "+ str(trajectories_cell.shape[0]))

        if trajectories_cell.shape[0] < stubs:
            logging.debug(
                "cell"
                + str(cell)
                + " is a stub ("
                + str(trajectories_cell.shape[0])
                + "), setting cell number to "
                + str(cell_number_unassigned)
            )
            stub_cell_nums.append(cell)

    trajectories_unfiltered.loc[
        trajectories_unfiltered["cell"].isin(stub_cell_nums), "cell"
    ] = cell_number_unassigned

    trajectories_filtered = trajectories_unfiltered

    # Interpolate to fill the gaps in the trajectories (left from allowing memory in the linking)
    trajectories_filtered_unfilled = copy.deepcopy(trajectories_filtered)

    # trajectories_filtered_filled=fill_gaps(trajectories_filtered_unfilled,order=order,
    #                extrapolate=extrapolate,frame_max=field_in.shape[0]-1,
    #                hdim_1_max=field_in.shape[1],hdim_2_max=field_in.shape[2])
    # add coordinates from input fields to output trajectories (time,dimensions)
    # logging.debug('start adding coordinates to trajectories')
    # trajectories_filtered_filled=add_coordinates(trajectories_filtered_filled,field_in)
    # add time coordinate relative to cell initiation:
    # logging.debug('start adding cell time to trajectories')
    trajectories_filtered_filled = trajectories_filtered_unfilled
    trajectories_final = add_cell_time(
        trajectories_filtered_filled, cell_number_unassigned=cell_number_unassigned
    )
    # Add metadata
    trajectories_final.attrs["cell_number_unassigned"] = cell_number_unassigned

    # add coordinate to raw features identified:
    logging.debug("start adding coordinates to detected features")
    logging.debug("feature linking completed")

    return trajectories_final
493
+
494
+
495
def fill_gaps(
    t, order=1, extrapolate=0, frame_max=None, hdim_1_max=None, hdim_2_max=None
):
    """Fill gaps in the trajectories by spline interpolation, and
    optionally extrapolate beyond the start and end of each track.

    Parameters
    ----------
    t : pandas.DataFrame
        Trajectories from trackpy.

    order : int, optional
        Order of polynomial used to extrapolate trajectory into
        gaps and beyond start and end point. Default is 1.

    extrapolate : int, optional
        Number of timesteps to extrapolate trajectories. Default is 0.

    frame_max : int, optional
        Size of input data along time axis. If None, tracks are not
        clipped at the end of the time axis. Default is None.

    hdim_1_max, hdim_2_max : int, optional
        Size of input data along first and second horizontal axis.
        If None, no clipping is applied along that axis.
        Default is None.

    Returns
    -------
    t : pandas.DataFrame
        Trajectories from trackpy with filled gaps and potentially
        extrapolated.
    """

    from scipy.interpolate import InterpolatedUnivariateSpline

    logging.debug("start filling gaps")

    t_list = []  # empty list to store interpolated DataFrames

    # group by cell number and perform process for each cell individually:
    t_grouped = t.groupby("cell")
    for cell, track in t_grouped:
        # Setup interpolator from existing points (of order given as keyword)
        frame_in = track["frame"].values
        hdim_1_in = track["hdim_1"].values
        hdim_2_in = track["hdim_2"].values
        s_x = InterpolatedUnivariateSpline(frame_in, hdim_1_in, k=order)
        s_y = InterpolatedUnivariateSpline(frame_in, hdim_2_in, k=order)

        # Create new index filling in gaps and possibly extrapolating:
        index_min = min(frame_in) - extrapolate
        index_min = max(index_min, 0)
        index_max = max(frame_in) + extrapolate
        if frame_max is not None:
            # do not extrapolate beyond the end of the input data
            index_max = min(index_max, frame_max)
        new_index = range(index_min, index_max + 1)  # +1 here to include last value
        track = track.reindex(new_index)

        # Interpolate to extended index:
        frame_out = new_index
        hdim_1_out = s_x(frame_out)
        hdim_2_out = s_y(frame_out)

        # Replace fields in data frame with interpolated values:
        track["frame"] = new_index
        track["hdim_1"] = hdim_1_out
        track["hdim_2"] = hdim_2_out
        track["cell"] = cell

        # Append DataFrame to list of DataFrames
        t_list.append(track)
    # Concatenate interpolated trajectories into one DataFrame:
    t_out = pd.concat(t_list)
    # Restrict output trajectories to input data in space;
    # upper bounds are only applied when the corresponding size is known:
    in_bounds = (t_out["hdim_1"] > 0) & (t_out["hdim_2"] > 0)
    if hdim_1_max is not None:
        in_bounds &= t_out["hdim_1"] < hdim_1_max
    if hdim_2_max is not None:
        in_bounds &= t_out["hdim_2"] < hdim_2_max
    t_out = t_out.loc[in_bounds]
    t_out = t_out.reset_index(drop=True)
    return t_out
574
+
575
+
576
def add_cell_time(t: pd.DataFrame, cell_number_unassigned: int):
    """Add cell time as time since the initiation of each cell.

    Parameters
    ----------
    t : pandas.DataFrame
        Trajectories with added coordinates. Must contain 'cell' and
        'time' columns. Modified in place.
    cell_number_unassigned : int
        Cell value marking features that were not assigned to a track.

    Returns
    -------
    t : pandas.DataFrame
        Trajectories with an added 'time_cell' column holding the time
        since each cell's first appearance (NaT for unassigned features).
    """

    # time elapsed since the first timestep of each cell
    t["time_cell"] = t["time"] - t.groupby("cell")["time"].transform("min")
    t["time_cell"] = pd.to_timedelta(t["time_cell"])
    # unassigned features have no meaningful cell time
    t.loc[t["cell"] == cell_number_unassigned, "time_cell"] = pd.Timedelta("nat")
    return t
599
+
600
+
601
def remap_particle_to_cell_nv(particle_cell_map, input_particle):
    """Look up the cell number for a single trackpy particle number.

    Helper designed to be vectorized with np.vectorize so that a whole
    column of particle numbers can be remapped in one call.

    Parameters
    ----------
    particle_cell_map: dict-like
        The dictionary mapping particle number to cell number
    input_particle: key for particle_cell_map
        The particle number to remap

    Returns
    -------
    The cell number associated with input_particle.
    """
    remapped_cell = particle_cell_map[input_particle]
    return remapped_cell
@@ -0,0 +1,27 @@
1
+ from .general import (
2
+ add_coordinates,
3
+ add_coordinates_3D,
4
+ get_spacings,
5
+ get_bounding_box,
6
+ combine_tobac_feats,
7
+ combine_feature_dataframes,
8
+ transform_feature_points,
9
+ standardize_track_dataset,
10
+ spectral_filtering,
11
+ )
12
+
13
+ from .mask import (
14
+ mask_cell,
15
+ mask_cell_surface,
16
+ mask_cube_cell,
17
+ mask_cube_untracked,
18
+ mask_cube,
19
+ column_mask_from2D,
20
+ mask_features,
21
+ mask_features_surface,
22
+ mask_cube_features,
23
+ )
24
+
25
+ from .internal import get_label_props_in_dict, get_indices_of_labels_from_reg_prop_dict
26
+
27
+ from .bulk_statistics import get_statistics, get_statistics_from_mask