tobac 1.6.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. tobac/__init__.py +112 -0
  2. tobac/analysis/__init__.py +31 -0
  3. tobac/analysis/cell_analysis.py +628 -0
  4. tobac/analysis/feature_analysis.py +212 -0
  5. tobac/analysis/spatial.py +619 -0
  6. tobac/centerofgravity.py +226 -0
  7. tobac/feature_detection.py +1758 -0
  8. tobac/merge_split.py +324 -0
  9. tobac/plotting.py +2321 -0
  10. tobac/segmentation/__init__.py +10 -0
  11. tobac/segmentation/watershed_segmentation.py +1316 -0
  12. tobac/testing.py +1179 -0
  13. tobac/tests/segmentation_tests/test_iris_xarray_segmentation.py +0 -0
  14. tobac/tests/segmentation_tests/test_segmentation.py +1183 -0
  15. tobac/tests/segmentation_tests/test_segmentation_time_pad.py +104 -0
  16. tobac/tests/test_analysis_spatial.py +1109 -0
  17. tobac/tests/test_convert.py +265 -0
  18. tobac/tests/test_datetime.py +216 -0
  19. tobac/tests/test_decorators.py +148 -0
  20. tobac/tests/test_feature_detection.py +1321 -0
  21. tobac/tests/test_generators.py +273 -0
  22. tobac/tests/test_import.py +24 -0
  23. tobac/tests/test_iris_xarray_match_utils.py +244 -0
  24. tobac/tests/test_merge_split.py +351 -0
  25. tobac/tests/test_pbc_utils.py +497 -0
  26. tobac/tests/test_sample_data.py +197 -0
  27. tobac/tests/test_testing.py +747 -0
  28. tobac/tests/test_tracking.py +714 -0
  29. tobac/tests/test_utils.py +650 -0
  30. tobac/tests/test_utils_bulk_statistics.py +789 -0
  31. tobac/tests/test_utils_coordinates.py +328 -0
  32. tobac/tests/test_utils_internal.py +97 -0
  33. tobac/tests/test_xarray_utils.py +232 -0
  34. tobac/tracking.py +613 -0
  35. tobac/utils/__init__.py +27 -0
  36. tobac/utils/bulk_statistics.py +360 -0
  37. tobac/utils/datetime.py +184 -0
  38. tobac/utils/decorators.py +540 -0
  39. tobac/utils/general.py +753 -0
  40. tobac/utils/generators.py +87 -0
  41. tobac/utils/internal/__init__.py +2 -0
  42. tobac/utils/internal/coordinates.py +430 -0
  43. tobac/utils/internal/iris_utils.py +462 -0
  44. tobac/utils/internal/label_props.py +82 -0
  45. tobac/utils/internal/xarray_utils.py +439 -0
  46. tobac/utils/mask.py +364 -0
  47. tobac/utils/periodic_boundaries.py +419 -0
  48. tobac/wrapper.py +244 -0
  49. tobac-1.6.2.dist-info/METADATA +154 -0
  50. tobac-1.6.2.dist-info/RECORD +53 -0
  51. tobac-1.6.2.dist-info/WHEEL +5 -0
  52. tobac-1.6.2.dist-info/licenses/LICENSE +29 -0
  53. tobac-1.6.2.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1183 @@
1
+ import pytest
2
+ import tobac.segmentation as seg
3
+ import numpy as np
4
+ from tobac import segmentation, feature_detection, testing
5
+ from tobac.utils import periodic_boundaries as pbc_utils
6
+
7
+
8
def test_segmentation_timestep_2D_feature_2D_seg():
    """Tests `tobac.segmentation.segmentation_timestep` with a 2D
    input feature and a 2D segmentation array.

    Four scenarios are covered, the last three run for every PBC option
    ("none", "hdim_1", "hdim_2", "both"):

    1. A feature fully inside the domain (no boundary contact).
    2. A feature straddling the hdim_1 boundary.
    3. A feature straddling the hdim_2 boundary.
    4. A feature on the domain corner, straddling both boundaries.

    The expected segmented region(s) are obtained from
    `pbc_utils.get_pbc_coordinates`, which is tested separately.
    """
    # Before we can run segmentation, we must run feature detection.

    # start by building a simple dataset with a single feature

    test_dset_size = (50, 50)
    test_hdim_1_pt = 20.0
    test_hdim_2_pt = 20.0
    test_hdim_1_sz = 5
    test_hdim_2_sz = 5
    test_dxy = 1000
    # Integer bounding box of the blob (end-exclusive slice bounds).
    hdim_1_start_feat = int(np.ceil(test_hdim_1_pt - test_hdim_1_sz / 2))
    hdim_1_end_feat = int(np.ceil(test_hdim_1_pt + test_hdim_1_sz / 2))
    hdim_2_start_feat = int(np.ceil(test_hdim_2_pt - test_hdim_2_sz / 2))
    hdim_2_end_feat = int(np.ceil(test_hdim_2_pt + test_hdim_2_sz / 2))

    test_amp = 2

    test_data = np.zeros(test_dset_size)
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        amplitude=test_amp,
    )
    test_data_iris = testing.make_dataset_from_arr(test_data, data_type="iris")
    # Generate dummy feature dataset
    test_feature_ds = testing.generate_single_feature(
        start_h1=20.0, start_h2=20.0, max_h1=1000, max_h2=1000
    )

    out_seg_mask, out_df = segmentation.segmentation_timestep(
        field_in=test_data_iris,
        features_in=test_feature_ds,
        dxy=test_dxy,
        threshold=1.5,
        PBC_flag="none",
    )

    # Make sure that all labeled points are segmented
    assert np.all(
        out_seg_mask.core_data()[
            hdim_1_start_feat:hdim_1_end_feat, hdim_2_start_feat:hdim_2_end_feat
        ]
        == np.ones((test_hdim_1_sz, test_hdim_2_sz))
    )

    # Now try PBCs
    # First, something stretching across hdim_1
    test_hdim_1_pt = 0.0
    test_data = np.zeros(test_dset_size)

    # Note that PBC flag here is 'both' as we still want the blob to be on both
    # sides of the boundary to see if we accidentally grab it without PBC
    # segmentation
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        amplitude=test_amp,
        PBC_flag="both",
    )

    test_data_iris = testing.make_dataset_from_arr(test_data, data_type="iris")
    # Generate dummy feature dataset
    test_feature_ds = testing.generate_single_feature(
        start_h1=test_hdim_1_pt, start_h2=test_hdim_2_pt, max_h1=1000, max_h2=1000
    )

    # Recompute the hdim_1 bounds of a blob centred on the boundary.
    hdim_1_start_feat, hdim_1_end_feat = testing.get_start_end_of_feat(
        test_hdim_1_pt, test_hdim_1_sz, 0, test_dset_size[0], is_pbc=True
    )

    for pbc_option in ["none", "hdim_1", "hdim_2", "both"]:
        out_seg_mask, out_df = seg.segmentation_timestep(
            field_in=test_data_iris,
            features_in=test_feature_ds,
            dxy=test_dxy,
            threshold=test_amp - 0.5,
            PBC_flag=pbc_option,
        )
        # This will automatically give the appropriate box, and it's tested separately.
        segmented_box_expected = pbc_utils.get_pbc_coordinates(
            0,
            test_dset_size[0],
            0,
            test_dset_size[1],
            hdim_1_start_feat,
            hdim_1_end_feat,
            hdim_2_start_feat,
            hdim_2_end_feat,
            PBC_flag=pbc_option,
        )
        # Make sure that all labeled points are segmented
        for seg_box in segmented_box_expected:
            assert np.all(
                out_seg_mask.core_data()[
                    seg_box[0] : seg_box[1], seg_box[2] : seg_box[3]
                ]
                == np.ones((seg_box[1] - seg_box[0], seg_box[3] - seg_box[2]))
            )

        if pbc_option in ["none", "hdim_2"]:
            # there will only be one seg_box: without hdim_1 periodicity only
            # the part of the blob inside the domain edge is segmented
            assert np.sum(
                out_seg_mask.core_data()[out_seg_mask.core_data() == 1]
            ) == np.sum(np.ones((seg_box[1] - seg_box[0], seg_box[3] - seg_box[2])))
        else:
            # We should be capturing the whole feature
            assert np.sum(
                out_seg_mask.core_data()[out_seg_mask.core_data() == 1]
            ) == np.sum(np.ones((test_hdim_1_sz, test_hdim_2_sz)))

    # Same as the above test, but for hdim_2
    # First, try the cases where we shouldn't get the points on the opposite
    # hdim_2 side
    test_hdim_1_pt = 20.0
    test_hdim_2_pt = 0.0
    test_data = np.zeros(test_dset_size)
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        amplitude=test_amp,
        PBC_flag="both",
    )
    test_data_iris = testing.make_dataset_from_arr(test_data, data_type="iris")
    # Generate dummy feature dataset
    test_feature_ds = testing.generate_single_feature(
        start_h1=test_hdim_1_pt, start_h2=test_hdim_2_pt, max_h1=1000, max_h2=1000
    )
    hdim_1_start_feat, hdim_1_end_feat = testing.get_start_end_of_feat(
        test_hdim_1_pt, test_hdim_1_sz, 0, test_dset_size[0], is_pbc=True
    )

    hdim_2_start_feat, hdim_2_end_feat = testing.get_start_end_of_feat(
        test_hdim_2_pt, test_hdim_2_sz, 0, test_dset_size[1], is_pbc=True
    )

    for pbc_option in ["none", "hdim_1", "hdim_2", "both"]:
        out_seg_mask, out_df = seg.segmentation_timestep(
            field_in=test_data_iris,
            features_in=test_feature_ds,
            dxy=test_dxy,
            threshold=test_amp - 0.5,
            PBC_flag=pbc_option,
        )
        # This will automatically give the appropriate box(es), and it's tested separately.
        segmented_box_expected = pbc_utils.get_pbc_coordinates(
            0,
            test_dset_size[0],
            0,
            test_dset_size[1],
            hdim_1_start_feat,
            hdim_1_end_feat,
            hdim_2_start_feat,
            hdim_2_end_feat,
            PBC_flag=pbc_option,
        )
        # Make sure that all labeled points are segmented
        for seg_box in segmented_box_expected:
            assert np.all(
                out_seg_mask.core_data()[
                    seg_box[0] : seg_box[1], seg_box[2] : seg_box[3]
                ]
                == np.ones((seg_box[1] - seg_box[0], seg_box[3] - seg_box[2]))
            )

        if pbc_option in ["none", "hdim_1"]:
            # there will only be one seg_box
            assert np.sum(
                out_seg_mask.core_data()[out_seg_mask.core_data() == 1]
            ) == np.sum(np.ones((seg_box[1] - seg_box[0], seg_box[3] - seg_box[2])))
        else:
            # We should be capturing the whole feature
            assert np.sum(
                out_seg_mask.core_data()[out_seg_mask.core_data() == 1]
            ) == np.sum(np.ones((test_hdim_1_sz, test_hdim_2_sz)))

    # Same as the above tests, but for a corner point straddling both
    # boundaries at once.
    test_hdim_1_pt = 0.0
    test_hdim_2_pt = 0.0
    test_data = np.zeros(test_dset_size)
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        amplitude=test_amp,
        PBC_flag="both",
    )
    test_data_iris = testing.make_dataset_from_arr(test_data, data_type="iris")
    # Generate dummy feature dataset
    test_feature_ds = testing.generate_single_feature(
        start_h1=test_hdim_1_pt, start_h2=test_hdim_2_pt, max_h1=1000, max_h2=1000
    )
    hdim_1_start_feat, hdim_1_end_feat = testing.get_start_end_of_feat(
        test_hdim_1_pt, test_hdim_1_sz, 0, test_dset_size[0], is_pbc=True
    )

    hdim_2_start_feat, hdim_2_end_feat = testing.get_start_end_of_feat(
        test_hdim_2_pt, test_hdim_2_sz, 0, test_dset_size[1], is_pbc=True
    )

    for pbc_option in ["none", "hdim_1", "hdim_2", "both"]:
        out_seg_mask, out_df = seg.segmentation_timestep(
            field_in=test_data_iris,
            features_in=test_feature_ds,
            dxy=test_dxy,
            threshold=test_amp - 0.5,
            PBC_flag=pbc_option,
        )
        # This will automatically give the appropriate box(es), and it's tested separately.
        segmented_box_expected = pbc_utils.get_pbc_coordinates(
            0,
            test_dset_size[0],
            0,
            test_dset_size[1],
            hdim_1_start_feat,
            hdim_1_end_feat,
            hdim_2_start_feat,
            hdim_2_end_feat,
            PBC_flag=pbc_option,
        )
        # Make sure that all labeled points are segmented
        for seg_box in segmented_box_expected:
            # TODO: something is wrong with this case, unclear what.
            assert np.all(
                out_seg_mask.core_data()[
                    seg_box[0] : seg_box[1], seg_box[2] : seg_box[3]
                ]
                == np.ones((seg_box[1] - seg_box[0], seg_box[3] - seg_box[2]))
            )

    # TODO: Make sure for none, hdim_1, hdim_2 that only the appropriate points are segmented
258
def test_segmentation_timestep_level():
    """Tests `tobac.segmentation.segmentation_timestep` with a 2D
    input feature and a 3D segmentation array, specifying the `level` parameter.

    Two vertically stacked blobs are placed above the same (h1, h2) point.
    Without `level`, both are segmented; with `level` restricted to the lower
    blob's vertical span, only the lower blob may be segmented.
    """
    # Before we can run segmentation, we must run feature detection.

    # start by building a simple dataset with a single feature

    test_dset_size = (20, 50, 50)
    test_hdim_1_pt = 20.0
    test_hdim_2_pt = 20.0
    test_vdim_pt = 2
    test_hdim_1_sz = 5
    test_hdim_2_sz = 5
    test_vdim_sz = 3
    test_dxy = 1000

    # Integer bounding box of the lower blob (end-exclusive slice bounds).
    vdim_start_feat = int(np.ceil(test_vdim_pt - test_vdim_sz / 2))
    vdim_end_feat = int(np.ceil(test_vdim_pt + test_vdim_sz / 2))
    hdim_1_start_feat = int(np.ceil(test_hdim_1_pt - test_hdim_1_sz / 2))
    hdim_1_end_feat = int(np.ceil(test_hdim_1_pt + test_hdim_1_sz / 2))
    hdim_2_start_feat = int(np.ceil(test_hdim_2_pt - test_hdim_2_sz / 2))
    hdim_2_end_feat = int(np.ceil(test_hdim_2_pt + test_hdim_2_sz / 2))

    test_amp = 2

    test_data = np.zeros(test_dset_size)
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        test_vdim_pt,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        v_size=test_vdim_sz,
        amplitude=test_amp,
    )

    # Make a second feature, above the first.

    delta_height = 8
    test_data = testing.make_feature_blob(
        test_data,
        test_hdim_1_pt,
        test_hdim_2_pt,
        test_vdim_pt + delta_height,
        h1_size=test_hdim_1_sz,
        h2_size=test_hdim_2_sz,
        v_size=test_vdim_sz,
        amplitude=test_amp,
    )

    test_data_iris = testing.make_dataset_from_arr(
        test_data, data_type="iris", z_dim_num=0, y_dim_num=1, x_dim_num=2
    )
    # Generate dummy feature dataset
    test_feature_ds = testing.generate_single_feature(
        start_h1=20.0, start_h2=20.0, max_h1=1000, max_h2=1000
    )

    # Run without `level`: the seed column spans the whole vertical axis.
    out_seg_mask, out_df = segmentation.segmentation_timestep(
        field_in=test_data_iris,
        features_in=test_feature_ds,
        dxy=test_dxy,
        threshold=1.5,
        seed_3D_flag="column",
    )
    out_seg_mask_arr = out_seg_mask.core_data()
    # Make sure that all labeled points are segmented, before setting specific levels
    assert np.all(
        out_seg_mask_arr[
            vdim_start_feat:vdim_end_feat,
            hdim_1_start_feat:hdim_1_end_feat,
            hdim_2_start_feat:hdim_2_end_feat,
        ]
        == np.ones((test_vdim_sz, test_hdim_1_sz, test_hdim_2_sz))
    )
    assert np.all(
        out_seg_mask_arr[
            vdim_start_feat + delta_height : vdim_end_feat + delta_height,
            hdim_1_start_feat:hdim_1_end_feat,
            hdim_2_start_feat:hdim_2_end_feat,
        ]
        == np.ones((test_vdim_sz, test_hdim_1_sz, test_hdim_2_sz))
    )

    # now set specific levels
    out_seg_mask, out_df = segmentation.segmentation_timestep(
        field_in=test_data_iris,
        features_in=test_feature_ds,
        dxy=test_dxy,
        level=slice(vdim_start_feat, vdim_end_feat),
        threshold=1.5,
        seed_3D_flag="column",
    )
    out_seg_mask_arr = out_seg_mask.core_data()
    # With `level` restricted to the lower blob's span, the lower blob is
    # still fully segmented, while the upper blob must be left unlabeled.
    assert np.all(
        out_seg_mask_arr[
            vdim_start_feat:vdim_end_feat,
            hdim_1_start_feat:hdim_1_end_feat,
            hdim_2_start_feat:hdim_2_end_feat,
        ]
        == np.ones((test_vdim_sz, test_hdim_1_sz, test_hdim_2_sz))
    )
    assert np.all(
        out_seg_mask_arr[
            vdim_start_feat + delta_height : vdim_end_feat + delta_height,
            hdim_1_start_feat:hdim_1_end_feat,
            hdim_2_start_feat:hdim_2_end_feat,
        ]
        == np.zeros((test_vdim_sz, test_hdim_1_sz, test_hdim_2_sz))
    )
373
@pytest.mark.parametrize(
    "blob_size, shift_pts, seed_3D_size, expected_both_segmented",
    [
        ((3, 3, 3), (0, 0, 4), 3, False),
        ((3, 3, 3), (0, 0, 4), 5, False),
        ((3, 3, 3), (0, 0, 4), 7, True),
    ],
)
def test_segmentation_timestep_3d_seed_box_nopbcs(
    blob_size, shift_pts, seed_3D_size, expected_both_segmented
):
    """Check the 3D seed box of ```tobac.segmentation.segmentation_timestep```.

    Two nearby (but not touching) blobs are placed in the domain while only
    the first one gets a detected feature. Depending on ``seed_3D_size``, the
    seed box around that feature either reaches the second blob (so both end
    up segmented together) or it does not.

    Parameters
    ----------
    blob_size: tuple(int, int, int)
        Size of each blob in (z, y, x) space.
        We strongly recommend that these be *odd* numbers.
    shift_pts: tuple(int, int, int)
        Offset of the second blob's center relative to the first, in
        (z, y, x) space.
    seed_3D_size: int or tuple
        Seed size to pass to tobac
    expected_both_segmented: bool
        True if we expect both features to be segmented, false
        if we don't expect them both to be segmented
    """
    domain_shape = (20, 50, 50)
    grid_spacing = 1000
    amplitude = 2
    # (v, h1, h2) center of the first blob; the second is shifted from it.
    first_center = (8, 20.0, 20.0)

    field = np.zeros(domain_shape)
    for offset in ((0, 0, 0), shift_pts):
        field = testing.make_feature_blob(
            field,
            first_center[1] + offset[1],
            first_center[2] + offset[2],
            first_center[0] + offset[0],
            h1_size=blob_size[1],
            h2_size=blob_size[2],
            v_size=blob_size[0],
            amplitude=amplitude,
        )

    field_iris = testing.make_dataset_from_arr(
        field, data_type="iris", z_dim_num=0, y_dim_num=1, x_dim_num=2
    )
    # The dummy feature dataset describes only the first blob.
    features = testing.generate_single_feature(
        start_v=first_center[0],
        start_h1=first_center[1],
        start_h2=first_center[2],
        max_h1=1000,
        max_h2=1000,
    )

    seg_mask, _ = segmentation.segmentation_timestep(
        field_in=field_iris,
        features_in=features,
        dxy=grid_spacing,
        threshold=1.5,
        seed_3D_flag="box",
        seed_3D_size=seed_3D_size,
    )

    # Checking the second blob's center point alone is enough for this test.
    label_at_second_center = seg_mask.core_data()[
        int(first_center[0] + shift_pts[0]),
        int(first_center[1] + shift_pts[1]),
        int(first_center[2] + shift_pts[2]),
    ]
    assert (label_at_second_center == 1) == expected_both_segmented
468
@pytest.mark.parametrize(
    "test_dset_size, vertical_axis_num, "
    "vertical_coord_name,"
    " vertical_coord_opt, expected_raise",
    [
        ((20, 30, 40), 0, "altitude", None, False),
        ((20, 30, 40), 1, "altitude", None, False),
        ((20, 30, 40), 2, "altitude", None, False),
        ((20, 30, 40), 0, "air_pressure", "air_pressure", False),
        ((20, 30, 40), 0, "air_pressure", None, True),
        ((20, 30, 40), 0, "model_level_number", None, False),
        ((20, 30, 40), 0, "altitude", None, False),
        ((20, 30, 40), 0, "geopotential_height", None, False),
    ],
)
def test_different_z_axes(
    test_dset_size,
    vertical_axis_num,
    vertical_coord_name,
    vertical_coord_opt,
    expected_raise,
):
    """Tests ```tobac.segmentation.segmentation_timestep```
    Tests:
    The output is the same no matter what order we have axes in.
    A ValueError is raised if an invalid vertical coordinate is
    passed in

    Parameters
    ----------
    test_dset_size: tuple(int, int, int)
        Size of the test dataset
    vertical_axis_num: int (0-2, inclusive)
        Which axis in test_dset_size is the vertical axis
    vertical_coord_name: str
        Name of the vertical coordinate.
    vertical_coord_opt: str
        What to pass in as the vertical coordinate option to segmentation_timestep
    expected_raise: bool
        True if we expect a ValueError to be raised, false otherwise
    """

    # First, just check that input and output shapes are the same.
    test_dxy = 1000
    test_vdim_pt_1 = 8
    test_hdim_1_pt_1 = 12
    test_hdim_2_pt_1 = 12
    test_data = np.zeros(test_dset_size)
    common_dset_opts = {
        "in_arr": test_data,
        "data_type": "iris",
        "z_dim_name": vertical_coord_name,
    }
    # Build the dataset with the vertical axis at the requested position.
    # The z/y/x dimension numbers must always be a permutation of (0, 1, 2).
    if vertical_axis_num == 0:
        test_data_iris = testing.make_dataset_from_arr(
            z_dim_num=0, y_dim_num=1, x_dim_num=2, **common_dset_opts
        )
    elif vertical_axis_num == 1:
        # Bug fix: x_dim_num was previously 1, colliding with z_dim_num=1
        # and leaving no dimension mapped to axis 2.
        test_data_iris = testing.make_dataset_from_arr(
            z_dim_num=1, y_dim_num=0, x_dim_num=2, **common_dset_opts
        )
    elif vertical_axis_num == 2:
        test_data_iris = testing.make_dataset_from_arr(
            z_dim_num=2, y_dim_num=0, x_dim_num=1, **common_dset_opts
        )

    # Generate dummy feature dataset only on the first feature.
    test_feature_ds = testing.generate_single_feature(
        start_v=test_vdim_pt_1,
        start_h1=test_hdim_1_pt_1,
        start_h2=test_hdim_2_pt_1,
        max_h1=1000,
        max_h2=1000,
    )
    if not expected_raise:
        out_seg_mask, out_df = segmentation.segmentation_timestep(
            field_in=test_data_iris,
            features_in=test_feature_ds,
            dxy=test_dxy,
            threshold=1.5,
            vertical_coord=vertical_coord_opt,
        )
        # Check that shapes don't change.
        assert test_data.shape == out_seg_mask.core_data().shape

    else:
        # Expecting a raise: vertical_coord is left at its default here, so
        # auto-detection of the vertical coordinate is expected to fail.
        # NOTE(review): relies on the default vertical_coord behavior —
        # confirm against segmentation_timestep's signature.
        with pytest.raises(ValueError):
            out_seg_mask, out_df = segmentation.segmentation_timestep(
                field_in=test_data_iris,
                features_in=test_feature_ds,
                dxy=test_dxy,
                threshold=1.5,
            )
564
def test_segmentation_multiple_features():
    """Tests `tobac.segmentation.segmentation_timestep` with a 2D input
    containing multiple features with different areas. Specifically checks
    that each feature's area (ncells) matches the segmentation mask, and that
    bulk-statistics columns are carried through segmentation.
    """
    domain_shape = (50, 50)
    min_cells_per_threshold = 2
    grid_spacing = 1

    # Blob specs as (h1 center, h2 center, h1 size, h2 size, amplitude).
    blob_small = (20.0, 20.0, 5, 5, 2)
    blob_large = (40.0, 40.0, 10, 10, 10)
    size_feature1 = blob_small[2] * blob_small[3]
    size_feature2 = blob_large[2] * blob_large[3]

    # Place both blobs in the same field (small one first, like before).
    field = np.zeros(domain_shape)
    for h1_pt, h2_pt, h1_sz, h2_sz, amp in (blob_small, blob_large):
        field = testing.make_feature_blob(
            field,
            h1_pt,
            h2_pt,
            h1_size=h1_sz,
            h2_size=h2_sz,
            amplitude=amp,
        )

    field_iris = testing.make_dataset_from_arr(field, data_type="iris")

    # detect both features
    fd_output = feature_detection.feature_detection_multithreshold_timestep(
        field_iris,
        i_time=0,
        dxy=1,
        threshold=[1, 2, 3],
        n_min_threshold=min_cells_per_threshold,
        target="maximum",
        statistic={"features_mean": np.mean},
    )

    # add feature IDs to data frame for one time step
    fd_output["feature"] = [1, 2]

    # perform segmentation
    seg_mask, seg_df = segmentation.segmentation_timestep(
        field_in=field_iris,
        features_in=fd_output,
        dxy=grid_spacing,
        threshold=1.5,
        statistic={"segments_mean": np.mean},
    )
    seg_mask_arr = seg_mask.core_data()

    # the number of grid cells per feature (ncells) must be consistent with
    # the segmentation mask
    assert int(seg_df[seg_df.feature == 1].ncells.values[0]) == size_feature1
    assert int(seg_df[seg_df.feature == 2].ncells.values[0]) == size_feature2
    # bulk statistic columns are created in the output (one column added
    # after segmentation)
    assert seg_df.columns.size - fd_output.columns.size > 1
    # statistics are calculated everywhere an ncells area was found
    assert (
        seg_df.ncells[seg_df["ncells"] > 0].shape
        == seg_df.ncells[seg_df["features_mean"] > 0].shape
    )
643
# TODO: add more tests to make sure buddy box code is run.
# From this list right now, I'm not sure why buddy box isn't run actually.
@pytest.mark.parametrize(
    "dset_size, blob_1_loc, blob_1_size, blob_2_loc, blob_2_size,"
    "shift_domain, seed_3D_size",
    [
        ((20, 30, 40), (8, 0, 0), (5, 5, 5), (8, 3, 3), (5, 5, 5), (0, -8, -8), None),
        ((20, 30, 40), (8, 0, 0), (5, 5, 5), (8, 3, 3), (5, 5, 5), (0, -8, -8), None),
        ((20, 30, 40), (8, 1, 1), (5, 5, 5), (8, 28, 38), (5, 5, 5), (0, 15, 15), None),
        ((20, 30, 40), (8, 0, 0), (5, 5, 5), (8, 28, 38), (5, 5, 5), (0, -8, -8), None),
        (
            (20, 30, 40),
            (8, 0, 0),
            (5, 5, 5),
            (8, 28, 38),
            (5, 5, 5),
            (0, -8, -8),
            (5, 5, 5),
        ),
    ],
)
# TODO: last test fails
def test_segmentation_timestep_3d_buddy_box(
    dset_size,
    blob_1_loc,
    blob_1_size,
    blob_2_loc,
    blob_2_size,
    shift_domain,
    seed_3D_size,
):
    """Tests ```tobac.segmentation.segmentation_timestep```
    to make sure that the "buddy box" 3D PBC implementation works.
    Basic procedure: build a dataset with two features (preferrably on the corner)
    and then run segmentation, shift the points, and then run segmentation again.
    After shifting back, the results should be identical.
    Note: only tests 'both' PBC condition.
    Parameters
    ----------
    dset_size: tuple(int, int, int)
        Size of the domain (assumes z, hdim_1, hdim_2)
    blob_1_loc: tuple(int, int, int)
        Location of the first blob
    blob_1_size: tuple(int, int, int)
        Size of the first blob. Note: use odd numbers here.
    blob_2_loc: tuple(int, int, int)
        Location of the second blob
    blob_2_size: tuple(int, int, int)
        Size of the second blob. Note: use odd numbers here.
    shift_domain: tuple(int, int, int)
        How many points to shift the domain by.
    seed_3D_size: None, int, or tuple
        Seed size to pass to tobac. If None, passes in a column seed
    """

    import pandas as pd

    # NOTE(review): the bare string below is a leftover no-op statement
    # (not a docstring); kept as-is.
    """
    The best way to do this I think is to create two blobs near (but not touching)
    each other, varying the seed_3D_size so that they are either segmented together
    or not segmented together.
    """
    test_dxy = 1000
    test_amp = 2

    # Build the field with both blobs; PBC_flag="both" lets the blobs wrap
    # around the domain edges.
    test_data = np.zeros(dset_size)
    test_data = testing.make_feature_blob(
        test_data,
        blob_1_loc[1],
        blob_1_loc[2],
        blob_1_loc[0],
        h1_size=blob_1_size[1],
        h2_size=blob_1_size[2],
        v_size=blob_1_size[0],
        amplitude=test_amp,
        PBC_flag="both",
    )

    # Make a second feature
    test_data = testing.make_feature_blob(
        test_data,
        blob_2_loc[1],
        blob_2_loc[2],
        blob_2_loc[0],
        h1_size=blob_2_size[1],
        h2_size=blob_2_size[2],
        v_size=blob_2_size[0],
        amplitude=test_amp,
        PBC_flag="both",
    )

    test_data_iris = testing.make_dataset_from_arr(
        test_data, data_type="iris", z_dim_num=0, y_dim_num=1, x_dim_num=2
    )
    # Generate dummy feature dataset only on the first feature.
    test_feature_ds_1 = testing.generate_single_feature(
        start_v=blob_1_loc[0],
        start_h1=blob_1_loc[1],
        start_h2=blob_1_loc[2],
        max_h1=dset_size[1],
        max_h2=dset_size[2],
        feature_num=1,
        PBC_flag="both",
    )
    test_feature_ds_2 = testing.generate_single_feature(
        start_v=blob_2_loc[0],
        start_h1=blob_2_loc[1],
        start_h2=blob_2_loc[2],
        max_h1=dset_size[1],
        max_h2=dset_size[2],
        feature_num=2,
        PBC_flag="both",
    )
    test_feature_ds = pd.concat([test_feature_ds_1, test_feature_ds_2])

    common_seg_opts = {"dxy": test_dxy, "threshold": 1.5, "PBC_flag": "both"}
    if seed_3D_size is None:
        common_seg_opts["seed_3D_flag"] = "column"
    else:
        common_seg_opts["seed_3D_flag"] = "box"
        common_seg_opts["seed_3D_size"] = seed_3D_size

    # First segmentation: original (unshifted) domain.
    out_seg_mask, out_df = seg.segmentation_timestep(
        field_in=test_data_iris, features_in=test_feature_ds, **common_seg_opts
    )

    # Now, shift the data over and re-run segmentation.
    test_data_shifted = np.roll(test_data, shift_domain, axis=(0, 1, 2))
    test_data_iris_shifted = testing.make_dataset_from_arr(
        test_data_shifted, data_type="iris", z_dim_num=0, y_dim_num=1, x_dim_num=2
    )
    test_feature_ds_1 = testing.generate_single_feature(
        start_v=blob_1_loc[0] + shift_domain[0],
        start_h1=blob_1_loc[1] + shift_domain[1],
        start_h2=blob_1_loc[2] + shift_domain[2],
        max_h1=dset_size[1],
        max_h2=dset_size[2],
        feature_num=1,
        PBC_flag="both",
    )
    test_feature_ds_2 = testing.generate_single_feature(
        start_v=blob_2_loc[0] + shift_domain[0],
        start_h1=blob_2_loc[1] + shift_domain[1],
        start_h2=blob_2_loc[2] + shift_domain[2],
        max_h1=dset_size[1],
        max_h2=dset_size[2],
        feature_num=2,
        PBC_flag="both",
    )
    test_feature_ds_shifted = pd.concat([test_feature_ds_1, test_feature_ds_2])
    out_seg_mask_shifted, out_df = seg.segmentation_timestep(
        field_in=test_data_iris_shifted,
        features_in=test_feature_ds_shifted,
        **common_seg_opts,
    )

    # Now, shift output back.
    out_seg_reshifted = np.roll(
        out_seg_mask_shifted.core_data(),
        tuple((-x for x in shift_domain)),
        axis=(0, 1, 2),
    )

    # Under full periodicity, segmentation must be shift-invariant: the
    # re-shifted mask should match the unshifted run exactly.
    assert np.all(out_seg_mask.core_data() == out_seg_reshifted)
808
+
809
def _make_pbc_feature_pair(is_3D, loc_1, loc_2, common_feat_opts):
    """Build a two-feature dataframe with features at *loc_1* and *loc_2*.

    Locations are (v, h1, h2) when ``is_3D`` is True, else (h1, h2).
    ``common_feat_opts`` is forwarded to ``testing.generate_single_feature``.
    """
    import pandas as pd

    if is_3D:
        feat_1 = testing.generate_single_feature(
            start_v=loc_1[0],
            start_h1=loc_1[1],
            start_h2=loc_1[2],
            feature_num=1,
            **common_feat_opts,
        )
        feat_2 = testing.generate_single_feature(
            start_v=loc_2[0],
            start_h1=loc_2[1],
            start_h2=loc_2[2],
            feature_num=2,
            **common_feat_opts,
        )
    else:
        feat_1 = testing.generate_single_feature(
            start_h1=loc_1[0],
            start_h2=loc_1[1],
            feature_num=1,
            **common_feat_opts,
        )
        feat_2 = testing.generate_single_feature(
            start_h1=loc_2[0],
            start_h2=loc_2[1],
            feature_num=2,
            **common_feat_opts,
        )
    return pd.concat([feat_1, feat_2])


# NOTE: duplicate parameter rows from the original list were removed
# (rows 1/2 and 6/7 were byte-identical); coverage is unchanged.
@pytest.mark.parametrize(
    "dset_size, feat_1_loc, feat_2_loc, shift_domain, seed_3D_size",
    [
        ((20, 30, 40), (8, 0, 0), (8, 3, 3), (0, -8, -8), None),
        ((20, 30, 40), (8, 1, 1), (8, 28, 38), (0, 15, 15), None),
        ((20, 30, 40), (8, 0, 0), (8, 28, 38), (0, -8, -8), None),
        ((20, 30, 40), (8, 0, 0), (8, 28, 38), (0, -8, -8), (5, 5, 5)),
        ((30, 40), (0, 0), (3, 3), (-8, -8), None),
        ((30, 40), (1, 1), (28, 38), (15, 15), None),
        ((30, 40), (0, 0), (28, 38), (-8, -8), None),
        ((30, 40), (0, 0), (28, 38), (-8, -8), (5, 5)),
    ],
)
def test_add_markers_pbcs(dset_size, feat_1_loc, feat_2_loc, shift_domain, seed_3D_size):
    """Tests ```tobac.segmentation.add_markers```
    to make sure that adding markers works and is consistent across PBCs

    Parameters
    ----------
    dset_size: tuple(int, int, int) or (int, int)
        Size of the domain (assumes z, hdim_1, hdim_2) or (hdim_1, hdim_2)
    feat_1_loc: tuple, same length as dset_size
        Location of the first blob
    feat_2_loc: tuple, same length as dset_size
        Location of the second blob
    shift_domain: tuple, same length as dset_size
        How many points to shift the domain by.
    seed_3D_size: None, int, or tuple
        Seed size to pass to tobac. If None, passes in a column seed
    """

    is_3D = len(dset_size) != 2
    # With a leading vertical dim, the horizontal dims start at axis 1.
    start_h1_ax = 1 if is_3D else 0

    common_feat_opts = {
        "PBC_flag": "both",
        "max_h1": dset_size[start_h1_ax],
        "max_h2": dset_size[start_h1_ax + 1],
    }

    common_marker_opts = {"PBC_flag": "both"}
    if seed_3D_size is None:
        common_marker_opts["seed_3D_flag"] = "column"
    else:
        common_marker_opts["seed_3D_flag"] = "box"
        common_marker_opts["seed_3D_size"] = seed_3D_size

    # Markers from the unshifted features.
    test_feature_ds = _make_pbc_feature_pair(
        is_3D, feat_1_loc, feat_2_loc, common_feat_opts
    )
    marker_arr = seg.add_markers(
        test_feature_ds, np.zeros(dset_size), **common_marker_opts
    )

    # Shift both features across the (doubly periodic) domain and re-run.
    shifted_loc_1 = tuple(p + s for p, s in zip(feat_1_loc, shift_domain))
    shifted_loc_2 = tuple(p + s for p, s in zip(feat_2_loc, shift_domain))
    test_feature_ds_shifted = _make_pbc_feature_pair(
        is_3D, shifted_loc_1, shifted_loc_2, common_feat_opts
    )
    marker_arr_shifted = seg.add_markers(
        test_feature_ds_shifted, np.zeros(dset_size), **common_marker_opts
    )

    # Shift the output back; under PBCs the marker field must be identical.
    roll_axes = (0, 1, 2) if is_3D else (0, 1)
    marker_arr_reshifted = np.roll(
        marker_arr_shifted, tuple(-x for x in shift_domain), axis=roll_axes
    )

    assert np.all(marker_arr == marker_arr_reshifted)
951
@pytest.mark.parametrize(
    "PBC_flag",
    [
        ("none"),
        ("hdim_1"),
        ("hdim_2"),
        ("both"),
    ],
)
def test_empty_segmentation(PBC_flag):
    """Tests ```tobac.segmentation.segmentation_timestep``` with an
    empty/zeroed out array

    """

    # Domain dimensions and grid spacing for the all-zero field.
    n_h1, n_h2, n_v = 100, 100, 5
    test_dxy = 1000

    single_feature = testing.generate_single_feature(
        start_v=1,
        start_h1=1,
        start_h2=1,
        max_h1=n_h1,
        max_h2=n_h2,
        feature_num=1,
        PBC_flag=PBC_flag,
    )

    zero_field = np.zeros((n_v, n_h1, n_h2))
    test_data_iris = testing.make_dataset_from_arr(
        zero_field, data_type="iris", z_dim_num=0, y_dim_num=1, x_dim_num=2
    )

    out_seg_mask, out_df = seg.segmentation_timestep(
        field_in=test_data_iris,
        features_in=single_feature,
        dxy=test_dxy,
        threshold=1.5,
        PBC_flag=PBC_flag,
        segment_number_unassigned=0,
        segment_number_below_threshold=-1,
    )

    # Nothing exceeds the threshold, so every cell gets the
    # below-threshold label.
    assert np.all(out_seg_mask.core_data() == -1)
998
+
999
def test_pbc_snake_segmentation():
    """
    Test that a "snake" feature that crosses PBCs multiple times is
    recognized as a single feature.

    The field is a striped pattern along the two opposing boundary columns
    of a 50x50 domain; under a periodic boundary in that dimension the
    stripes connect into one continuous segment. The test is run once with
    the periodic dimension as hdim_2 and once (on the transposed field) as
    hdim_1.
    """

    # Stripes on the first and last columns that only connect across the
    # periodic boundary.
    test_arr = np.zeros((50, 50))
    test_arr[::4, 0] = 2
    test_arr[1::4, 0] = 2
    test_arr[3::4, 0] = 2

    test_arr[1::4, 49] = 2
    test_arr[2::4, 49] = 2
    test_arr[3::4, 49] = 2

    test_data_iris = testing.make_dataset_from_arr(test_arr, data_type="iris")
    fd_output = feature_detection.feature_detection_multithreshold_timestep(
        test_data_iris,
        0,
        threshold=[1, 2, 3],
        n_min_threshold=2,
        dxy=1,
        target="maximum",
        PBC_flag="hdim_2",
    )
    # Force a single feature number so the expected mask is deterministic.
    fd_output["feature"] = [1]

    seg_output, seg_feats = segmentation.segmentation_timestep(
        test_data_iris,
        fd_output,
        1,
        threshold=1,
        PBC_flag="hdim_2",
        seed_3D_flag="box",
        seed_3D_size=3,
        segment_number_unassigned=0,
        segment_number_below_threshold=-1,
    )

    # Expected mask: the whole snake labelled as feature 1, everything else
    # below threshold (-1).
    correct_seg_arr = np.full((50, 50), -1, dtype=np.int32)
    feat_num = 1
    correct_seg_arr[::4, 0] = feat_num
    correct_seg_arr[1::4, 0] = feat_num
    correct_seg_arr[3::4, 0] = feat_num

    correct_seg_arr[1::4, 49] = feat_num
    correct_seg_arr[2::4, 49] = feat_num
    correct_seg_arr[3::4, 49] = feat_num
    # (a stray `np.where(correct_seg_arr == 0)` whose result was discarded
    # has been removed here)
    seg_out_arr = seg_output.core_data()
    assert np.all(correct_seg_arr == seg_out_arr)

    # Repeat with the periodic dimension as hdim_1 (transposed field).
    test_data_iris = testing.make_dataset_from_arr(test_arr.T, data_type="iris")
    fd_output = feature_detection.feature_detection_multithreshold_timestep(
        test_data_iris,
        0,
        threshold=[1, 2, 3],
        n_min_threshold=2,
        dxy=1,
        target="maximum",
        PBC_flag="hdim_1",
    )
    fd_output["feature"] = [1]

    seg_output, seg_feats = segmentation.segmentation_timestep(
        test_data_iris,
        fd_output,
        1,
        threshold=1,
        PBC_flag="hdim_1",
        seed_3D_flag="box",
        seed_3D_size=3,
        segment_number_unassigned=0,
        segment_number_below_threshold=-1,
    )
    seg_out_arr = seg_output.core_data()

    assert np.all(correct_seg_arr.T == seg_out_arr)
1078
+
1079
def test_max_distance():
    """
    Tests that max_distance works for both PBCs and normal segmentation
    """

    # Case 1: uniform field, no PBCs — max_distance=1 limits the segment
    # to the cells within one grid point of the feature.
    uniform_field = np.full((50, 50), 10.0)

    fd_output = testing.generate_single_feature(5, 5, max_h1=50, max_h2=50)
    test_data_iris = testing.make_dataset_from_arr(uniform_field, data_type="iris")

    seg_output, seg_feats = segmentation.segmentation_timestep(
        test_data_iris,
        fd_output,
        1,
        threshold=1,
        PBC_flag="none",
        max_distance=1,
    )

    # Expected: a plus-shaped segment centered on (5, 5).
    expected_mask = np.zeros((50, 50), dtype=np.int32)
    feat_num: int = 1
    expected_mask[4:7, 5] = feat_num
    expected_mask[5, 4:7] = feat_num

    assert np.all(expected_mask == seg_output.core_data())

    # Case 2: max_distance combined with PBCs is not yet supported and
    # must raise NotImplementedError.
    pbc_field = np.zeros((50, 50))
    pbc_field[:, :20] = 10
    pbc_field[:, 45:] = 10

    fd_output = testing.generate_single_feature(0, 0, max_h1=50, max_h2=50)
    test_data_iris = testing.make_dataset_from_arr(pbc_field, data_type="iris")

    with pytest.raises(NotImplementedError):
        seg_output, seg_feats = segmentation.segmentation_timestep(
            test_data_iris,
            fd_output,
            1,
            threshold=1,
            PBC_flag="hdim_2",
            max_distance=1,
        )
    # Disabled until max_distance + PBC segmentation is implemented:
    # correct_seg_arr = np.full((50, 50), 0, dtype=np.int32)
    # feat_num: int = 1
    # correct_seg_arr[0:3, 0] = feat_num
    # correct_seg_arr[0, 0:3] = feat_num
    # correct_seg_arr[:, 20:45] = -1
    #
    # seg_out_arr = seg_output.core_data()
    # assert np.all(correct_seg_arr == seg_out_arr)
1136
+
1137
@pytest.mark.parametrize(
    ("below_thresh", "above_thresh", "error"),
    ((0, 0, False), (0, -1, False), (-5, -10, False), (20, 30, True)),
)
def test_seg_alt_unseed_num(below_thresh, above_thresh, error):
    """
    Tests ```segmentation.segmentation_timestep``` to
    make sure that the unseeded regions are labeled appropriately.

    Parameters
    ----------
    below_thresh: int
        Label for below-threshold regions
        (``segment_number_below_threshold``).
    above_thresh: int
        Label for above-threshold but unseeded regions
        (``segment_number_unassigned``).
    error: bool
        Whether this label combination is invalid and must raise
        ValueError.
    """
    # Two above-threshold squares; only the first contains the seeded
    # feature, so the second must get the "unassigned" label.
    test_arr = np.zeros((50, 50))
    test_arr[0:10, 0:10] = 10
    test_arr[40:50, 40:50] = 10

    fd_output = testing.generate_single_feature(5, 5, max_h1=50, max_h2=50)

    test_data_iris = testing.make_dataset_from_arr(test_arr, data_type="iris")

    # Single call site for both the error and success paths (the original
    # duplicated this invocation verbatim in each branch).
    seg_kwargs = dict(
        threshold=1,
        PBC_flag="none",
        segment_number_below_threshold=below_thresh,
        segment_number_unassigned=above_thresh,
    )
    if error:
        with pytest.raises(ValueError):
            segmentation.segmentation_timestep(
                test_data_iris, fd_output, 1, **seg_kwargs
            )
        return

    seg_output, seg_feats = segmentation.segmentation_timestep(
        test_data_iris, fd_output, 1, **seg_kwargs
    )

    correct_seg_arr = np.full((50, 50), below_thresh, dtype=np.int32)
    feat_num: int = 1
    correct_seg_arr[0:10, 0:10] = feat_num
    correct_seg_arr[10:40, 10:40] = below_thresh
    correct_seg_arr[40:50, 40:50] = above_thresh

    seg_out_arr = seg_output.core_data()
    assert np.all(correct_seg_arr == seg_out_arr)