supervisely 6.73.238__py3-none-any.whl → 6.73.240__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. supervisely/annotation/annotation.py +2 -2
  2. supervisely/api/entity_annotation/tag_api.py +11 -4
  3. supervisely/api/file_api.py +17 -3
  4. supervisely/nn/__init__.py +1 -0
  5. supervisely/nn/benchmark/__init__.py +14 -2
  6. supervisely/nn/benchmark/base_benchmark.py +84 -37
  7. supervisely/nn/benchmark/base_evaluator.py +120 -0
  8. supervisely/nn/benchmark/base_visualizer.py +265 -0
  9. supervisely/nn/benchmark/comparison/detection_visualization/text_templates.py +5 -5
  10. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/calibration_score.py +2 -2
  11. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/explore_predicttions.py +39 -16
  12. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/localization_accuracy.py +1 -1
  13. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/outcome_counts.py +4 -4
  14. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/overview.py +12 -11
  15. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/pr_curve.py +1 -1
  16. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/precision_recal_f1.py +6 -6
  17. supervisely/nn/benchmark/comparison/detection_visualization/vis_metrics/speedtest.py +3 -3
  18. supervisely/nn/benchmark/{instance_segmentation_benchmark.py → instance_segmentation/benchmark.py} +9 -3
  19. supervisely/nn/benchmark/instance_segmentation/evaluator.py +58 -0
  20. supervisely/nn/benchmark/{visualization/text_templates/instance_segmentation_text.py → instance_segmentation/text_templates.py} +53 -69
  21. supervisely/nn/benchmark/instance_segmentation/visualizer.py +18 -0
  22. supervisely/nn/benchmark/object_detection/__init__.py +0 -0
  23. supervisely/nn/benchmark/object_detection/base_vis_metric.py +51 -0
  24. supervisely/nn/benchmark/{object_detection_benchmark.py → object_detection/benchmark.py} +4 -2
  25. supervisely/nn/benchmark/object_detection/evaluation_params.yaml +2 -0
  26. supervisely/nn/benchmark/{evaluation/object_detection_evaluator.py → object_detection/evaluator.py} +67 -9
  27. supervisely/nn/benchmark/{evaluation/coco → object_detection}/metric_provider.py +13 -14
  28. supervisely/nn/benchmark/{visualization/text_templates/object_detection_text.py → object_detection/text_templates.py} +49 -41
  29. supervisely/nn/benchmark/object_detection/vis_metrics/__init__.py +48 -0
  30. supervisely/nn/benchmark/{visualization → object_detection}/vis_metrics/confidence_distribution.py +20 -24
  31. supervisely/nn/benchmark/object_detection/vis_metrics/confidence_score.py +119 -0
  32. supervisely/nn/benchmark/{visualization → object_detection}/vis_metrics/confusion_matrix.py +34 -22
  33. supervisely/nn/benchmark/object_detection/vis_metrics/explore_predictions.py +129 -0
  34. supervisely/nn/benchmark/{visualization → object_detection}/vis_metrics/f1_score_at_different_iou.py +21 -26
  35. supervisely/nn/benchmark/object_detection/vis_metrics/frequently_confused.py +137 -0
  36. supervisely/nn/benchmark/object_detection/vis_metrics/iou_distribution.py +106 -0
  37. supervisely/nn/benchmark/object_detection/vis_metrics/key_metrics.py +136 -0
  38. supervisely/nn/benchmark/{visualization → object_detection}/vis_metrics/model_predictions.py +53 -49
  39. supervisely/nn/benchmark/object_detection/vis_metrics/outcome_counts.py +188 -0
  40. supervisely/nn/benchmark/object_detection/vis_metrics/outcome_counts_per_class.py +191 -0
  41. supervisely/nn/benchmark/object_detection/vis_metrics/overview.py +116 -0
  42. supervisely/nn/benchmark/object_detection/vis_metrics/pr_curve.py +106 -0
  43. supervisely/nn/benchmark/object_detection/vis_metrics/pr_curve_by_class.py +49 -0
  44. supervisely/nn/benchmark/object_detection/vis_metrics/precision.py +72 -0
  45. supervisely/nn/benchmark/object_detection/vis_metrics/precision_avg_per_class.py +59 -0
  46. supervisely/nn/benchmark/object_detection/vis_metrics/recall.py +71 -0
  47. supervisely/nn/benchmark/object_detection/vis_metrics/recall_vs_precision.py +56 -0
  48. supervisely/nn/benchmark/object_detection/vis_metrics/reliability_diagram.py +110 -0
  49. supervisely/nn/benchmark/object_detection/vis_metrics/speedtest.py +151 -0
  50. supervisely/nn/benchmark/object_detection/visualizer.py +697 -0
  51. supervisely/nn/benchmark/semantic_segmentation/__init__.py +9 -0
  52. supervisely/nn/benchmark/semantic_segmentation/base_vis_metric.py +55 -0
  53. supervisely/nn/benchmark/semantic_segmentation/benchmark.py +32 -0
  54. supervisely/nn/benchmark/semantic_segmentation/evaluation_params.yaml +0 -0
  55. supervisely/nn/benchmark/semantic_segmentation/evaluator.py +162 -0
  56. supervisely/nn/benchmark/semantic_segmentation/metric_provider.py +153 -0
  57. supervisely/nn/benchmark/semantic_segmentation/text_templates.py +130 -0
  58. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/__init__.py +0 -0
  59. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/acknowledgement.py +15 -0
  60. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/classwise_error_analysis.py +57 -0
  61. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/confusion_matrix.py +92 -0
  62. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/explore_predictions.py +84 -0
  63. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/frequently_confused.py +101 -0
  64. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/iou_eou.py +45 -0
  65. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/key_metrics.py +60 -0
  66. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/model_predictions.py +107 -0
  67. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/overview.py +112 -0
  68. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/renormalized_error_ou.py +48 -0
  69. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/speedtest.py +178 -0
  70. supervisely/nn/benchmark/semantic_segmentation/vis_metrics/vis_texts.py +21 -0
  71. supervisely/nn/benchmark/semantic_segmentation/visualizer.py +304 -0
  72. supervisely/nn/benchmark/utils/__init__.py +12 -0
  73. supervisely/nn/benchmark/utils/detection/__init__.py +2 -0
  74. supervisely/nn/benchmark/{evaluation/coco → utils/detection}/calculate_metrics.py +6 -4
  75. supervisely/nn/benchmark/utils/detection/metric_provider.py +533 -0
  76. supervisely/nn/benchmark/{coco_utils → utils/detection}/sly2coco.py +4 -4
  77. supervisely/nn/benchmark/{coco_utils/utils.py → utils/detection/utlis.py} +11 -0
  78. supervisely/nn/benchmark/utils/semantic_segmentation/__init__.py +0 -0
  79. supervisely/nn/benchmark/utils/semantic_segmentation/calculate_metrics.py +35 -0
  80. supervisely/nn/benchmark/utils/semantic_segmentation/evaluator.py +804 -0
  81. supervisely/nn/benchmark/utils/semantic_segmentation/loader.py +65 -0
  82. supervisely/nn/benchmark/utils/semantic_segmentation/utils.py +109 -0
  83. supervisely/nn/benchmark/visualization/evaluation_result.py +17 -3
  84. supervisely/nn/benchmark/visualization/vis_click_data.py +1 -1
  85. supervisely/nn/benchmark/visualization/widgets/__init__.py +3 -0
  86. supervisely/nn/benchmark/visualization/widgets/chart/chart.py +12 -4
  87. supervisely/nn/benchmark/visualization/widgets/gallery/gallery.py +35 -8
  88. supervisely/nn/benchmark/visualization/widgets/gallery/template.html +8 -4
  89. supervisely/nn/benchmark/visualization/widgets/markdown/markdown.py +1 -1
  90. supervisely/nn/benchmark/visualization/widgets/notification/notification.py +11 -7
  91. supervisely/nn/benchmark/visualization/widgets/radio_group/__init__.py +0 -0
  92. supervisely/nn/benchmark/visualization/widgets/radio_group/radio_group.py +34 -0
  93. supervisely/nn/benchmark/visualization/widgets/table/table.py +9 -3
  94. supervisely/nn/benchmark/visualization/widgets/widget.py +4 -0
  95. supervisely/project/project.py +18 -6
  96. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/METADATA +3 -1
  97. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/RECORD +104 -82
  98. supervisely/nn/benchmark/coco_utils/__init__.py +0 -2
  99. supervisely/nn/benchmark/evaluation/__init__.py +0 -3
  100. supervisely/nn/benchmark/evaluation/base_evaluator.py +0 -64
  101. supervisely/nn/benchmark/evaluation/coco/__init__.py +0 -2
  102. supervisely/nn/benchmark/evaluation/instance_segmentation_evaluator.py +0 -88
  103. supervisely/nn/benchmark/utils.py +0 -13
  104. supervisely/nn/benchmark/visualization/inference_speed/__init__.py +0 -19
  105. supervisely/nn/benchmark/visualization/inference_speed/speedtest_batch.py +0 -161
  106. supervisely/nn/benchmark/visualization/inference_speed/speedtest_intro.py +0 -28
  107. supervisely/nn/benchmark/visualization/inference_speed/speedtest_overview.py +0 -141
  108. supervisely/nn/benchmark/visualization/inference_speed/speedtest_real_time.py +0 -63
  109. supervisely/nn/benchmark/visualization/text_templates/inference_speed_text.py +0 -23
  110. supervisely/nn/benchmark/visualization/vis_metric_base.py +0 -337
  111. supervisely/nn/benchmark/visualization/vis_metrics/__init__.py +0 -67
  112. supervisely/nn/benchmark/visualization/vis_metrics/classwise_error_analysis.py +0 -55
  113. supervisely/nn/benchmark/visualization/vis_metrics/confidence_score.py +0 -93
  114. supervisely/nn/benchmark/visualization/vis_metrics/explorer_grid.py +0 -144
  115. supervisely/nn/benchmark/visualization/vis_metrics/frequently_confused.py +0 -115
  116. supervisely/nn/benchmark/visualization/vis_metrics/iou_distribution.py +0 -86
  117. supervisely/nn/benchmark/visualization/vis_metrics/outcome_counts.py +0 -119
  118. supervisely/nn/benchmark/visualization/vis_metrics/outcome_counts_per_class.py +0 -148
  119. supervisely/nn/benchmark/visualization/vis_metrics/overall_error_analysis.py +0 -109
  120. supervisely/nn/benchmark/visualization/vis_metrics/overview.py +0 -189
  121. supervisely/nn/benchmark/visualization/vis_metrics/percision_avg_per_class.py +0 -57
  122. supervisely/nn/benchmark/visualization/vis_metrics/pr_curve.py +0 -101
  123. supervisely/nn/benchmark/visualization/vis_metrics/pr_curve_by_class.py +0 -46
  124. supervisely/nn/benchmark/visualization/vis_metrics/precision.py +0 -56
  125. supervisely/nn/benchmark/visualization/vis_metrics/recall.py +0 -54
  126. supervisely/nn/benchmark/visualization/vis_metrics/recall_vs_precision.py +0 -57
  127. supervisely/nn/benchmark/visualization/vis_metrics/reliability_diagram.py +0 -88
  128. supervisely/nn/benchmark/visualization/vis_metrics/what_is.py +0 -23
  129. supervisely/nn/benchmark/visualization/vis_templates.py +0 -241
  130. supervisely/nn/benchmark/visualization/vis_widgets.py +0 -128
  131. supervisely/nn/benchmark/visualization/visualizer.py +0 -729
  132. /supervisely/nn/benchmark/{visualization/text_templates → instance_segmentation}/__init__.py +0 -0
  133. /supervisely/nn/benchmark/{evaluation/coco → instance_segmentation}/evaluation_params.yaml +0 -0
  134. /supervisely/nn/benchmark/{evaluation/coco → utils/detection}/metrics.py +0 -0
  135. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/LICENSE +0 -0
  136. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/WHEEL +0 -0
  137. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/entry_points.txt +0 -0
  138. {supervisely-6.73.238.dist-info → supervisely-6.73.240.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,804 @@
1
+ import os
2
+ from collections import defaultdict
3
+ from typing import Dict, Iterable, List, Optional, Union
4
+
5
+ import cv2
6
+ import numpy as np
7
+ import pandas as pd
8
+
9
+ from supervisely.nn.benchmark.utils.semantic_segmentation.utils import (
10
+ dilate_mask,
11
+ get_contiguous_segments,
12
+ get_exterior_boundary,
13
+ get_interior_boundary,
14
+ get_single_contiguous_segment,
15
+ one_hot,
16
+ single_one_hot,
17
+ )
18
+ from supervisely.sly_logger import logger
19
+ from supervisely.task.progress import tqdm_sly
20
+
21
# Per-pixel outcome categories assigned during error analysis.
# Codes are the consecutive integers -1 ("ignore") through 8 ("FN_segment").
ERROR_CODES = {
    name: code
    for code, name in enumerate(
        (
            "ignore",
            "unassigned",
            "TP",
            "TN",
            "FP_boundary",
            "FN_boundary",
            "FP_extent",
            "FN_extent",
            "FP_segment",
            "FN_segment",
        ),
        start=-1,
    )
}


# RGB color used to render each error code in visualizations.
ERROR_PALETTE = {
    code: rgb
    for code, rgb in zip(
        range(-1, 9),
        (
            (100, 100, 100),
            (150, 150, 150),
            (255, 255, 255),
            (0, 0, 0),
            (255, 200, 150),
            (150, 200, 255),
            (255, 100, 150),
            (150, 100, 255),
            (255, 0, 0),
            (0, 0, 255),
        ),
    )
}
47
+
48
+
49
+ class Evaluator:
50
def __init__(
    self,
    class_names: List[str],
    boundary_width: Union[float, int] = 0.01,
    boundary_iou_d: float = 0.02,
    boundary_implementation: str = "exact",
    result_dir: str = "output",
    progress: Optional[tqdm_sly] = None,
):
    """The main class for running our error analysis.

    :param class_names: List of strings providing names for class ids 0,...,C.
    :param boundary_width: The parameter d in the paper, either as a float in (0,1)
        (relative to the image diagonal) or as an integer > 1 (absolute number of pixels).
    :param boundary_iou_d: Boundary width used for the Boundary IoU metric.
    :param boundary_implementation: Choose "exact" for the euclidean pixel distance.
        The Boundary IoU paper uses the L1 distance ("fast").
    :param result_dir: Directory where evaluation artifacts are written.
    :param progress: Optional progress-bar factory; defaults to tqdm_sly.
    :raises ValueError: If boundary_width is negative or a non-integer >= 1.
    """
    # Numeric backend is selected at runtime: `np` becomes cupy when a GPU is
    # usable, plain numpy otherwise; `numpy` always refers to the real numpy
    # module (needed e.g. for errstate/isnan on host data).
    global torch, np, GPU, numpy
    import torch  # pylint: disable=import-error
    import numpy as numpy

    GPU = False
    if torch.cuda.is_available():
        try:
            # gpu-compatible numpy analogue
            import cupy as np  # pylint: disable=import-error

            GPU = True
            logger.info("Using GPU for evaluation.")
        except Exception:
            # cupy missing or CUDA initialization failed: fall back to CPU so
            # later code does not call cupy-only APIs (e.g. `.get()`) on numpy
            # arrays. (Previously GPU stayed True and `numpy` stayed unbound,
            # crashing later in evaluate()/calculate_per_image_metrics().)
            logger.warning(
                "Failed to import cupy. Use cupy official documentation to install this "
                "module: https://docs.cupy.dev/en/stable/install.html"
            )
            np = numpy
    else:
        np = numpy

    self.progress = progress or tqdm_sly
    self.class_names = class_names
    self.num_classes = len(self.class_names)

    self.boundary_width = boundary_width
    if 0 < self.boundary_width < 1:
        # fraction of the image diagonal, resolved per image
        self.use_relative_boundary_width = True
    elif self.boundary_width % 1 != 0 or self.boundary_width < 0:
        raise ValueError("boundary_width should be an integer or a float in (0,1)!")
    else:
        self.use_relative_boundary_width = False

    self.boundary_implementation = boundary_implementation
    self.boundary_iou_d = boundary_iou_d

    # raw pixel-overlap counts between every (gt, pred) class pair
    self.confusion_matrix = np.zeros(
        (self.num_classes, self.num_classes),
    )
    # "gtIdx_predIdx" -> names of images contributing to that confusion cell
    self.cell_img_names = defaultdict(list)
    self.result_dir = result_dir

    # per-image metric accumulators; turned into a DataFrame by evaluate()
    self.image_metrics = {
        "img_names": [],
        "pixel_acc": [],
        "precision": [],
        "recall": [],
        "f1_score": [],
        "iou": [],
        "boundary_iou": [],
        "boundary_eou": [],
        "extent_eou": [],
        "segment_eou": [],
        "boundary_eou_renormed": [],
        "extent_eou_renormed": [],
        "segment_eou_renormed": [],
    }
    self.img_names = []
    self.per_image_metrics = pd.DataFrame()
128
+
129
def extract_masks(self, seg, cl, n_cl):
    """Return an (n_cl, H, W) stack of binary masks, one per class id in `cl`."""
    if GPU:
        # move the segmentation onto the active (cupy) backend
        seg = np.asarray(seg)
    height, width = seg.shape
    stack = np.zeros((n_cl, height, width))

    for idx, class_id in enumerate(cl):
        stack[idx, :, :] = seg == class_id

    return stack
139
+
140
def extract_masks_gen(self, seg, cl):
    """Lazily yield one binary (H, W) mask per class id in `cl`."""
    if GPU:
        # move the segmentation onto the active (cupy) backend
        seg = np.asarray(seg)
    height, width = seg.shape
    for class_id in cl:
        mask = np.zeros((height, width))
        mask[seg == class_id] = 1
        yield mask
148
+
149
def calc_confusion_matrix(self, pred, gt, cmat, img_name):
    """Accumulate pixel-overlap counts between every (gt, pred) class pair into `cmat`.

    Also records `img_name` under the "gtIdx_predIdx" key of `self.cell_img_names`
    for every cell this image contributes to. Returns the updated matrix.
    """
    assert pred.shape == gt.shape

    # class ids are 1-based in the segmentation maps
    class_ids = np.arange(1, self.num_classes + 1)
    for gt_idx, gt_mask in enumerate(self.extract_masks_gen(gt, class_ids)):
        if np.sum(gt_mask) == 0:
            continue  # class absent from ground truth
        for pred_idx, pred_mask in enumerate(self.extract_masks_gen(pred, class_ids)):
            if np.sum(pred_mask) == 0:
                continue  # class absent from prediction
            cmat[gt_idx, pred_idx] += np.sum(np.logical_and(pred_mask, gt_mask))
            self.cell_img_names[str(gt_idx) + "_" + str(pred_idx)].append(img_name)

    return cmat
163
+
164
def evaluate(self, loader: Iterable):
    """This runs the analysis for a whole dataset.

    :param loader: Iterable providing (pred, gt, img_name) triples; must support len().
    :returns: Dict with aggregated result, row-normalized confusion matrix,
        per-image metrics DataFrame, and per-cell image names.
    """
    # running per-class pixel counts for every error category
    self.results = {
        name: np.zeros(self.num_classes, dtype=np.int64)
        for name in (
            "unassigned",
            "ignore",
            "TP",
            "TN",
            "FP_boundary",
            "FN_boundary",
            "FP_extent",
            "FN_extent",
            "FP_segment",
            "FN_segment",
        )
    }
    self.boundary_iou_intersection_counts = np.zeros(self.num_classes, dtype=np.int64)
    self.boundary_iou_union_counts = np.zeros(self.num_classes, dtype=np.int64)

    with self.progress(message="Calculating metrics...", total=len(loader)) as pbar:
        for pred, gt, img_name in loader:
            per_sample = self.evaluate_sample(pred, gt, img_name)
            self.update_results(per_sample, img_name)
            self.confusion_matrix = self.calc_confusion_matrix(
                pred,
                gt,
                self.confusion_matrix,
                img_name,
            )
            pbar.update(1)

    if GPU:
        # transfer cupy accumulators back to host numpy before pandas/rounding
        for key, value in self.results.items():
            self.results[key] = value.get()  # pylint: disable=no-member
        self.boundary_iou_intersection_counts = self.boundary_iou_intersection_counts.get()
        self.boundary_iou_union_counts = self.boundary_iou_union_counts.get()

    result = self.calculate_error_metrics()

    # row-normalize the confusion matrix; rows with no gt pixels become 0
    row_sums = self.confusion_matrix.sum(axis=1, keepdims=True)
    normalized_confusion_matrix = self.confusion_matrix / row_sums
    normalized_confusion_matrix[np.isnan(normalized_confusion_matrix)] = 0
    normalized_confusion_matrix = np.round(normalized_confusion_matrix, 3)

    self.per_image_metrics = pd.DataFrame(self.image_metrics, index=self.img_names)
    return {
        "result": result,
        "confusion_matrix": normalized_confusion_matrix,
        "per_image_metrics": self.per_image_metrics,
        "cell_img_names": self.cell_img_names,
    }
214
+
215
def evaluate_sample(self, pred, gt, img_name):
    """Runs the analysis for a single sample.

    :param pred: Predicted segmentation as a numpy array of shape (H,W).
    :param gt: Ground-truth segmentation as a numpy array of shape (H,W).
    :returns: Dictionary holding results for this sample.
    """
    if pred.shape != gt.shape:
        raise RuntimeError(
            f"Shapes of prediction and annotation do not match! Pred: {pred.shape}, GT: {gt.shape}"
        )
    height, width = pred.shape
    # one (H, W) plane of error codes per class, all pixels initially unassigned
    per_class_results = np.full(
        shape=(self.num_classes, height, width),
        fill_value=ERROR_CODES["unassigned"],
        dtype=np.int8,
    )

    boundary_intersections = np.zeros(self.num_classes, dtype=np.int64)
    boundary_unions = np.zeros(self.num_classes, dtype=np.int64)
    for c in range(self.num_classes):
        # class ids in the maps are 1-based
        gt_mask = gt == c + 1
        pred_mask = pred == c + 1
        if not gt_mask.any() and not pred_mask.any():
            # class absent everywhere: the whole plane is true negative
            per_class_results[c] = ERROR_CODES["TN"]
            continue
        tp_mask = np.logical_and(gt_mask, pred_mask)
        tn_mask = ~np.logical_or(gt_mask, pred_mask)
        fp_mask = np.logical_and(pred_mask, ~gt_mask)
        fn_mask = np.logical_and(~pred_mask, gt_mask)
        per_class_results[c][tp_mask] = ERROR_CODES["TP"]
        per_class_results[c][tn_mask] = ERROR_CODES["TN"]

        # BOUNDARY errors are classified first ...
        per_class_results[c] = self.get_single_boundary_errors(
            class_results=per_class_results[c],
            tp_mask=tp_mask,
            tn_mask=tn_mask,
            fp_mask=fp_mask,
            fn_mask=fn_mask,
        )

        # ... then whatever is still unassigned becomes an EXTENT / SEGMENT error
        per_class_results[c] = self.get_single_extent_segment_errors(
            class_results=per_class_results[c],
            pred_mask=pred_mask,
            gt_mask=gt_mask,
        )

        assert not (per_class_results[c] == ERROR_CODES["unassigned"]).any()

        # Boundary IoU counts for this class (no ignore region here)
        (
            intersection_active,
            union_active,
        ) = self.evaluate_single_sample_boundary_iou(
            class_results=per_class_results[c],
            pred_mask=pred_mask,
            gt_mask=gt_mask,
            ignore_inds=None,
        )

        boundary_intersections[c] += intersection_active
        boundary_unions[c] += union_active

    return dict(
        main_results=per_class_results,
        boundary_iou_results=(boundary_intersections, boundary_unions),
    )
286
+
287
def update_results(self, sample_results, img_name):
    """Fold one image's per-pixel error codes into the running dataset totals
    and trigger the per-image metric computation."""
    # main results: count pixels of each error category per class plane
    image_stats = {}
    for error_name, error_code in ERROR_CODES.items():
        counts = (sample_results["main_results"] == error_code).sum(axis=(1, 2))
        self.results[error_name] += counts
        image_stats[error_name] = counts

    # boundary IoU accumulators
    intersections, unions = sample_results["boundary_iou_results"]
    self.boundary_iou_intersection_counts += intersections
    self.boundary_iou_union_counts += unions
    image_stats["boundary_iou_intersection_counts"] = intersections
    image_stats["boundary_iou_union_counts"] = unions
    self.calculate_per_image_metrics(image_stats, img_name)
302
+
303
def calculate_per_image_metrics(self, image_stats, img_name):
    """Compute this image's metrics from raw error counts and append them to
    `self.image_metrics` (and the name to `self.img_names`).

    :param image_stats: Dict of per-class count arrays for every ERROR_CODES
        category plus boundary-IoU intersection/union counts.
    :param img_name: Image identifier used as the row index.
    """
    fp = image_stats["FP_boundary"] + image_stats["FP_extent"] + image_stats["FP_segment"]
    fn = image_stats["FN_boundary"] + image_stats["FN_extent"] + image_stats["FN_segment"]
    tp = image_stats["TP"]

    e_boundary = image_stats["FP_boundary"] + image_stats["FN_boundary"]
    e_extent = image_stats["FP_extent"] + image_stats["FN_extent"]
    e_segment = image_stats["FP_segment"] + image_stats["FN_segment"]

    # Classes absent from both gt and prediction yield 0/0 -> NaN; NaNs are
    # filtered out when averaging below, so silence the warnings uniformly
    # (previously only the boundary-IoU division was silenced, and several
    # never-used ratios were computed for nothing).
    with numpy.errstate(invalid="ignore", divide="ignore"):
        union = tp + fp + fn
        iou = tp / union

        overall_tp = tp[: self.num_classes].sum()
        overall_fn = fn[: self.num_classes].sum()
        pixel_acc = overall_tp / (overall_tp + overall_fn)

        precision = tp / (tp + fp)
        recall = tp / (tp + fn)
        f1_score = 2 / (1.0 / precision + 1.0 / recall)

        # error-over-union decomposition (boundary / extent / segment)
        e_boundary_ou = e_boundary / union
        e_extent_ou = e_extent / union
        e_segment_ou = e_segment / union

        # renormalized variants per the error-analysis paper
        e_boundary_ou_renormed = e_boundary / (tp + e_boundary)
        e_extent_ou_renormed = e_extent / (tp + e_boundary + e_extent)
        e_segment_ou_renormed = e_segment_ou

        boundary_iou = (
            image_stats["boundary_iou_intersection_counts"]
            / image_stats["boundary_iou_union_counts"]
        )

    def postprocess_values(values):
        # average over classes present in the image (NaN = class absent)
        values = values[~numpy.isnan(values)]
        return round(float(np.mean(values)), 2)

    self.image_metrics["pixel_acc"].append(round(float(pixel_acc), 2))
    self.image_metrics["precision"].append(postprocess_values(precision))
    self.image_metrics["recall"].append(postprocess_values(recall))
    self.image_metrics["f1_score"].append(postprocess_values(f1_score))
    self.image_metrics["iou"].append(postprocess_values(iou))
    self.image_metrics["boundary_iou"].append(postprocess_values(boundary_iou))
    self.image_metrics["boundary_eou"].append(postprocess_values(e_boundary_ou))
    self.image_metrics["extent_eou"].append(postprocess_values(e_extent_ou))
    self.image_metrics["segment_eou"].append(postprocess_values(e_segment_ou))
    self.image_metrics["boundary_eou_renormed"].append(
        postprocess_values(e_boundary_ou_renormed)
    )
    self.image_metrics["extent_eou_renormed"].append(postprocess_values(e_extent_ou_renormed))
    self.image_metrics["segment_eou_renormed"].append(postprocess_values(e_segment_ou_renormed))
    self.image_metrics["img_names"].append(img_name)

    self.img_names.append(img_name)
368
+
369
def get_boundary_errors(self, results, tp_mask, tn_mask, fp_mask, fn_mask):
    """Mark FP/FN pixels lying in the band between TP and TN regions as
    boundary errors, for all class planes at once.

    NOTE(review): near-duplicate of get_single_boundary_errors, which handles
    one class plane; kept for callers operating on full one-hot stacks.
    """
    height, width = tp_mask.shape[-2:]
    if self.use_relative_boundary_width:
        # resolve the relative width against this image's diagonal
        img_diag = np.sqrt(height**2 + width**2)
        if GPU:
            img_diag = img_diag.get()
            tp_mask = tp_mask.get()
            tn_mask = tn_mask.get()
        boundary_width = int(round(self.boundary_width * img_diag))
    else:
        boundary_width = self.boundary_width

    tp_ext_boundary = get_exterior_boundary(
        tp_mask, width=boundary_width, implementation=self.boundary_implementation
    )
    tn_ext_boundary = get_exterior_boundary(
        tn_mask, width=boundary_width, implementation=self.boundary_implementation
    )
    if GPU:
        tp_ext_boundary = np.asarray(tp_ext_boundary)
        tn_ext_boundary = np.asarray(tn_ext_boundary)

    # naive candidates: FP/FN pixels inside both exterior boundaries
    boundary_band = np.logical_and(tp_ext_boundary, tn_ext_boundary)
    fp_naive = np.logical_and(fp_mask, boundary_band)
    fn_naive = np.logical_and(fn_mask, boundary_band)
    if GPU:
        fp_naive = fp_naive.get()
        fn_naive = fn_naive.get()

    # grow the candidates back out, then clip to the original FP/FN pixels
    dilated_fp = dilate_mask(
        mask=fp_naive,
        width=boundary_width,
        implementation=self.boundary_implementation,
    )
    dilated_fn = dilate_mask(
        mask=fn_naive,
        width=boundary_width,
        implementation=self.boundary_implementation,
    )
    if GPU:
        dilated_fp = np.asarray(dilated_fp)
        dilated_fn = np.asarray(dilated_fn)

    fp_boundary_mask = np.logical_and(dilated_fp, fp_mask)
    fn_boundary_mask = np.logical_and(dilated_fn, fn_mask)
    if GPU:
        fp_boundary_mask = fp_boundary_mask.get()
        fn_boundary_mask = fn_boundary_mask.get()

    # check if every segment of boundary errors has a TP and a TN as direct neighbor
    fp_segments_by_class = get_contiguous_segments(fp_boundary_mask)
    fn_segments_by_class = get_contiguous_segments(fn_boundary_mask)

    tp_contour = get_exterior_boundary(tp_mask, width=1, implementation="fast")
    tn_contour = get_exterior_boundary(tn_mask, width=1, implementation="fast")

    for c, segments in fp_segments_by_class.items():
        for segment in segments:
            if not (tp_contour[c][segment].any() and tn_contour[c][segment].any()):
                fp_boundary_mask[c][segment] = False

    for c, segments in fn_segments_by_class.items():
        for segment in segments:
            if not (tp_contour[c][segment].any() and tn_contour[c][segment].any()):
                fn_boundary_mask[c][segment] = False

    # write boundary error codes only onto still-unassigned pixels
    selected = results[fp_boundary_mask]
    results[fp_boundary_mask] = np.where(
        selected != ERROR_CODES["unassigned"],
        selected,
        ERROR_CODES["FP_boundary"],
    )
    selected = results[fn_boundary_mask]
    results[fn_boundary_mask] = np.where(
        selected != ERROR_CODES["unassigned"],
        selected,
        ERROR_CODES["FN_boundary"],
    )
    return results
456
+
457
def get_single_boundary_errors(self, class_results, tp_mask, tn_mask, fp_mask, fn_mask):
    """Mark FP/FN pixels lying in the band between the TP and TN regions of a
    single class plane as boundary errors; returns the updated plane."""
    height, width = tp_mask.shape[-2:]
    if self.use_relative_boundary_width:
        # resolve the relative width against this image's diagonal
        img_diag = np.sqrt(height**2 + width**2)
        if GPU:
            img_diag = img_diag.get()
            tp_mask = tp_mask.get()
            tn_mask = tn_mask.get()
        boundary_width = int(round(self.boundary_width * img_diag))
    else:
        boundary_width = self.boundary_width

    tp_ext_boundary = get_exterior_boundary(
        tp_mask, width=boundary_width, implementation=self.boundary_implementation
    )
    tn_ext_boundary = get_exterior_boundary(
        tn_mask, width=boundary_width, implementation=self.boundary_implementation
    )
    if GPU:
        tp_ext_boundary = np.asarray(tp_ext_boundary)
        tn_ext_boundary = np.asarray(tn_ext_boundary)

    # naive candidates: FP/FN pixels inside both exterior boundaries
    boundary_band = np.logical_and(tp_ext_boundary, tn_ext_boundary)
    fp_naive = np.logical_and(fp_mask, boundary_band)
    fn_naive = np.logical_and(fn_mask, boundary_band)
    if GPU:
        fp_naive = fp_naive.get()
        fn_naive = fn_naive.get()

    # grow the candidates back out, then clip to the original FP/FN pixels
    dilated_fp = dilate_mask(
        mask=fp_naive,
        width=boundary_width,
        implementation=self.boundary_implementation,
    )
    dilated_fn = dilate_mask(
        mask=fn_naive,
        width=boundary_width,
        implementation=self.boundary_implementation,
    )
    if GPU:
        dilated_fp = np.asarray(dilated_fp)
        dilated_fn = np.asarray(dilated_fn)

    fp_boundary_mask = np.logical_and(dilated_fp, fp_mask)
    fn_boundary_mask = np.logical_and(dilated_fn, fn_mask)
    if GPU:
        fp_boundary_mask = fp_boundary_mask.get()
        fn_boundary_mask = fn_boundary_mask.get()

    # check if every segment of boundary errors has a TP and a TN as direct neighbor
    fp_segments = get_single_contiguous_segment(fp_boundary_mask)
    fn_segments = get_single_contiguous_segment(fn_boundary_mask)

    tp_contour = get_exterior_boundary(tp_mask, width=1, implementation="fast")
    tn_contour = get_exterior_boundary(tn_mask, width=1, implementation="fast")

    for segment in fp_segments:
        if not (tp_contour[segment].any() and tn_contour[segment].any()):
            fp_boundary_mask[segment] = False

    for segment in fn_segments:
        if not (tp_contour[segment].any() and tn_contour[segment].any()):
            fn_boundary_mask[segment] = False

    # write boundary error codes only onto still-unassigned pixels
    selected = class_results[fp_boundary_mask]
    class_results[fp_boundary_mask] = np.where(
        selected != ERROR_CODES["unassigned"],
        selected,
        ERROR_CODES["FP_boundary"],
    )
    selected = class_results[fn_boundary_mask]
    class_results[fn_boundary_mask] = np.where(
        selected != ERROR_CODES["unassigned"],
        selected,
        ERROR_CODES["FN_boundary"],
    )
    return class_results
542
+
543
def get_extent_segment_errors(
    self,
    results,
    pred_one_hot,
    gt_one_hot,
):
    """Assign every still-unassigned pixel to an extent or segment error.

    A contiguous FP/FN segment that overlaps a TP region is an "extent" error
    (the region exists but its extent is wrong); one with no TP overlap is a
    "segment" error (a whole hallucinated / missed region).
    """
    if GPU:
        pred_one_hot = pred_one_hot.get()
        gt_one_hot = gt_one_hot.get()

    pred_segments = get_contiguous_segments(pred_one_hot)
    gt_segments = get_contiguous_segments(gt_one_hot)

    for c, (pred_c, gt_c) in enumerate(zip(pred_one_hot, gt_one_hot)):
        if not pred_c.any():
            if gt_c.any():
                # class missed entirely: only FN segment errors remain
                results[c][results[c] == ERROR_CODES["unassigned"]] = ERROR_CODES["FN_segment"]
            # class absent from both: nothing to assign
            continue

        if not gt_c.any():
            # class predicted but not present: only FP segment errors
            if GPU:
                pred_c = np.asarray(pred_c)
            # positive prediction must be a superset of unassigned:
            # every prediction can only be unassigned or ignore
            assert pred_c[results[c] == ERROR_CODES["unassigned"]].all()
            results[c][results[c] == ERROR_CODES["unassigned"]] = ERROR_CODES["FP_segment"]
            continue

        # both present: classify each contiguous predicted segment (FP side)
        for pred_segment in pred_segments[c]:
            on_segment = results[c][pred_segment]
            if (on_segment == ERROR_CODES["unassigned"]).any():
                kind = (
                    "FP_extent"
                    if (on_segment == ERROR_CODES["TP"]).any()
                    else "FP_segment"
                )
                results[c][pred_segment] = np.where(
                    on_segment != ERROR_CODES["unassigned"],
                    on_segment,
                    ERROR_CODES[kind],
                )

        # ... and each contiguous ground-truth segment (FN side)
        for gt_segment in gt_segments[c]:
            on_segment = results[c][gt_segment]
            if (on_segment == ERROR_CODES["unassigned"]).any():
                kind = (
                    "FN_extent"
                    if (on_segment == ERROR_CODES["TP"]).any()
                    else "FN_segment"
                )
                results[c][gt_segment] = np.where(
                    on_segment != ERROR_CODES["unassigned"],
                    on_segment,
                    ERROR_CODES[kind],
                )

    return results
602
+
603
def get_single_extent_segment_errors(
    self,
    class_results,
    pred_mask,
    gt_mask,
):
    """Resolve remaining "unassigned" pixels of one class into extent/segment errors.

    A connected segment still containing unassigned pixels becomes an
    "extent" error when it also overlaps TP pixels, and a "segment" error
    otherwise; predicted segments yield FP codes, ground-truth segments FN
    codes. The error-code map is updated in place and returned.
    """
    if GPU:
        pred_mask = pred_mask.get()
        gt_mask = gt_mask.get()

    pred_segments = get_single_contiguous_segment(pred_mask)
    gt_segments = get_single_contiguous_segment(gt_mask)

    unassigned_code = ERROR_CODES["unassigned"]

    if not pred_mask.any():
        if gt_mask.any():
            # no predictions at all: every leftover pixel is a missed segment
            class_results[class_results == unassigned_code] = ERROR_CODES["FN_segment"]
        return class_results

    if not gt_mask.any():
        # no ground truth: every leftover pixel is a spurious segment.
        # Sanity check: unassigned pixels must all lie inside the prediction
        # (every prediction can only be unassigned or ignore).
        if GPU:
            pred_mask = np.asarray(pred_mask)
        assert pred_mask[class_results == unassigned_code].all()
        class_results[class_results == unassigned_code] = ERROR_CODES["FP_segment"]
        return class_results

    # both prediction and GT present: classify each segment individually,
    # predicted segments first (FP codes), then GT segments (FN codes)
    for segments, code_prefix in ((pred_segments, "FP"), (gt_segments, "FN")):
        for segment in segments:
            segment_codes = class_results[segment]
            if not (segment_codes == unassigned_code).any():
                continue
            # overlap with TP pixels -> extent error, otherwise segment error
            suffix = "_extent" if (segment_codes == ERROR_CODES["TP"]).any() else "_segment"
            class_results[segment] = np.where(
                segment_codes != unassigned_code,
                segment_codes,
                ERROR_CODES[code_prefix + suffix],
            )

    return class_results
+
666
def evaluate_sample_boundary_iou(
    self, sample_results, pred_one_hot, gt_one_hot, ignore_inds=None
):
    """Compute per-class Boundary IoU counts for one sample.

    Boundary IoU compares only the pixels within a thin band on the
    *interior* side of each mask's contour; the band width is a fixed
    fraction (``self.boundary_iou_d``) of the image diagonal.

    :param sample_results: per-class error-code maps; only used for the
        image shape here.
    :param pred_one_hot: per-class one-hot prediction masks.
    :param gt_one_hot: per-class one-hot ground-truth masks.
    :param ignore_inds: unused in this batched variant; kept for signature
        parity with :meth:`evaluate_single_sample_boundary_iou`, which
        does apply it. NOTE(review): consider honoring it here too.
    :return: tuple ``(intersection_counts, union_counts)`` with one entry
        per class.
    """
    H, W = sample_results.shape[-2:]
    img_diag = np.sqrt(H**2 + W**2)

    if GPU:
        img_diag = img_diag.get()
        pred_one_hot = pred_one_hot.get()
        gt_one_hot = gt_one_hot.get()

    # band is at least one pixel wide, otherwise proportional to the diagonal
    boundary_width = max(int(round(self.boundary_iou_d * img_diag)), 1)

    # BoundaryIoU uses "fast" boundary implementation, see https://github.com/bowenc0221/boundary-iou-api/blob/37d25586a677b043ed585f10e5c42d4e80176ea9/boundary_iou/utils/boundary_utils.py#L12
    pred_one_hot_int_boundary = get_interior_boundary(
        pred_one_hot, width=boundary_width, implementation="fast"
    )  # P_d ∩ P
    gt_one_hot_int_boundary = get_interior_boundary(
        gt_one_hot, width=boundary_width, implementation="fast"
    )  # G_d ∩ G
    # NOTE: the exterior GT boundary (G_d - G) used to be computed here as
    # well, but its result was never used — removed as dead work.

    if GPU:
        pred_one_hot_int_boundary = np.asarray(pred_one_hot_int_boundary)
        gt_one_hot_int_boundary = np.asarray(gt_one_hot_int_boundary)

    boundary_intersection = np.logical_and(pred_one_hot_int_boundary, gt_one_hot_int_boundary)
    boundary_union = np.logical_or(pred_one_hot_int_boundary, gt_one_hot_int_boundary)

    # reduce over the spatial axes, keeping the leading class axis
    boundary_intersection_counts = boundary_intersection.sum(axis=(1, 2))
    boundary_union_counts = boundary_union.sum(axis=(1, 2))

    return (
        boundary_intersection_counts,
        boundary_union_counts,
    )
+
705
def evaluate_single_sample_boundary_iou(
    self, class_results, pred_mask, gt_mask, ignore_inds=None
):
    """Compute Boundary IoU counts for a single class of one sample.

    The intersection/union is restricted to a thin band on the interior
    side of each mask's contour, with band width a fixed fraction
    (``self.boundary_iou_d``) of the image diagonal.

    :param class_results: error-code map; only used for the image shape here.
    :param pred_mask: binary prediction mask for the class.
    :param gt_mask: binary ground-truth mask for the class.
    :param ignore_inds: optional ``(ys, xs)`` index arrays of pixels to
        exclude from both intersection and union.
    :return: tuple ``(intersection_count, union_count)`` scalars.
    """
    H, W = class_results.shape[-2:]
    img_diag = np.sqrt(H**2 + W**2)

    if GPU:
        img_diag = img_diag.get()
        pred_mask = pred_mask.get()
        gt_mask = gt_mask.get()

    # band is at least one pixel wide, otherwise proportional to the diagonal
    boundary_width = max(int(round(self.boundary_iou_d * img_diag)), 1)

    # BoundaryIoU uses "fast" boundary implementation, see https://github.com/bowenc0221/boundary-iou-api/blob/37d25586a677b043ed585f10e5c42d4e80176ea9/boundary_iou/utils/boundary_utils.py#L12
    pred_mask_int_boundary = get_interior_boundary(
        pred_mask, width=boundary_width, implementation="fast"
    )  # P_d ∩ P
    gt_mask_int_boundary = get_interior_boundary(
        gt_mask, width=boundary_width, implementation="fast"
    )  # G_d ∩ G
    # NOTE: the exterior GT boundary (G_d - G) used to be computed here as
    # well, but its result was never used — removed as dead work.

    if GPU:
        pred_mask_int_boundary = np.asarray(pred_mask_int_boundary)
        gt_mask_int_boundary = np.asarray(gt_mask_int_boundary)

    boundary_intersection = np.logical_and(pred_mask_int_boundary, gt_mask_int_boundary)
    boundary_union = np.logical_or(pred_mask_int_boundary, gt_mask_int_boundary)

    if ignore_inds:  # remove ignore pixels
        ignore_inds_y, ignore_inds_x = ignore_inds
        # ignored pixels must never belong to the ground truth
        assert not gt_mask[ignore_inds_y, ignore_inds_x].any()
        boundary_intersection[ignore_inds_y, ignore_inds_x] = 0
        boundary_union[ignore_inds_y, ignore_inds_x] = 0

    boundary_intersection_counts = boundary_intersection.sum()
    boundary_union_counts = boundary_union.sum()

    return (
        boundary_intersection_counts,
        boundary_union_counts,
    )
+
750
def calculate_error_metrics(self):
    """Aggregate the accumulated pixel counts into per-class error metrics.

    Builds a dataframe indexed by class name (plus a trailing ``"mean"``
    row) with the raw error counts, combined FP/FN totals, IoU / precision /
    recall / F1, per-error-type "over union" (oU) shares, their renormalized
    variants, and Boundary IoU. Classes with empty denominators produce
    NaN/inf entries, which the ``"mean"`` row skips (pandas ``skipna``
    default).

    :return: :class:`pandas.DataFrame` with one row per class plus "mean".
    """
    dataframe = pd.DataFrame(index=self.class_names)
    for error_name, error_counts in self.results.items():
        if error_name == "unassigned":
            # by now every pixel must have been resolved into some category
            assert (error_counts == 0).all()
            continue
        dataframe[error_name] = error_counts

    dataframe["FP"] = (
        dataframe["FP_boundary"] + dataframe["FP_extent"] + dataframe["FP_segment"]
    )
    dataframe["FN"] = (
        dataframe["FN_boundary"] + dataframe["FN_extent"] + dataframe["FN_segment"]
    )
    dataframe["E_boundary"] = dataframe["FP_boundary"] + dataframe["FN_boundary"]
    dataframe["E_extent"] = dataframe["FP_extent"] + dataframe["FN_extent"]
    dataframe["E_segment"] = dataframe["FP_segment"] + dataframe["FN_segment"]

    # Suppress both 0/0 ("invalid") and x/0 ("divide") warnings for ALL
    # ratio computations: classes absent from the data legitimately yield
    # zero denominators. (Previously only the Boundary IoU division was
    # guarded, and only against "invalid".)
    with np.errstate(divide="ignore", invalid="ignore"):
        union = dataframe["TP"] + dataframe["FP"] + dataframe["FN"]
        dataframe["IoU"] = dataframe["TP"] / union
        dataframe["precision"] = dataframe["TP"] / (dataframe["TP"] + dataframe["FP"])
        dataframe["recall"] = dataframe["TP"] / (dataframe["TP"] + dataframe["FN"])
        # harmonic mean of precision and recall
        dataframe["F1_score"] = 2 / (1.0 / dataframe["precision"] + 1.0 / dataframe["recall"])

        dataframe["FP_boundary_oU"] = dataframe["FP_boundary"] / union
        dataframe["FN_boundary_oU"] = dataframe["FN_boundary"] / union
        dataframe["E_boundary_oU"] = dataframe["E_boundary"] / union

        dataframe["FP_extent_oU"] = dataframe["FP_extent"] / union
        dataframe["FN_extent_oU"] = dataframe["FN_extent"] / union
        dataframe["E_extent_oU"] = dataframe["E_extent"] / union

        dataframe["FP_segment_oU"] = dataframe["FP_segment"] / union
        dataframe["FN_segment_oU"] = dataframe["FN_segment"] / union
        dataframe["E_segment_oU"] = dataframe["E_segment"] / union

        # renormalized variants progressively exclude previously attributed
        # error types from the denominator
        dataframe["E_boundary_oU_renormed"] = dataframe["E_boundary"] / (
            dataframe["TP"] + dataframe["E_boundary"]
        )
        dataframe["E_extent_oU_renormed"] = dataframe["E_extent"] / (
            dataframe["TP"] + dataframe["E_boundary"] + dataframe["E_extent"]
        )
        dataframe["E_segment_oU_renormed"] = dataframe["E_segment_oU"]

        # Boundary IoU from the globally accumulated boundary counts
        dataframe["boundary_IoU"] = (
            self.boundary_iou_intersection_counts / self.boundary_iou_union_counts
        )

    # aggregate over classes; NaN entries (empty classes) are skipped
    dataframe.loc["mean"] = dataframe.mean(axis=0)

    return dataframe