dtlpy 1.113.10__py3-none-any.whl → 1.114.13__py3-none-any.whl
This diff compares the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- dtlpy/__init__.py +488 -488
- dtlpy/__version__.py +1 -1
- dtlpy/assets/__init__.py +26 -26
- dtlpy/assets/__pycache__/__init__.cpython-38.pyc +0 -0
- dtlpy/assets/code_server/config.yaml +2 -2
- dtlpy/assets/code_server/installation.sh +24 -24
- dtlpy/assets/code_server/launch.json +13 -13
- dtlpy/assets/code_server/settings.json +2 -2
- dtlpy/assets/main.py +53 -53
- dtlpy/assets/main_partial.py +18 -18
- dtlpy/assets/mock.json +11 -11
- dtlpy/assets/model_adapter.py +83 -83
- dtlpy/assets/package.json +61 -61
- dtlpy/assets/package_catalog.json +29 -29
- dtlpy/assets/package_gitignore +307 -307
- dtlpy/assets/service_runners/__init__.py +33 -33
- dtlpy/assets/service_runners/converter.py +96 -96
- dtlpy/assets/service_runners/multi_method.py +49 -49
- dtlpy/assets/service_runners/multi_method_annotation.py +54 -54
- dtlpy/assets/service_runners/multi_method_dataset.py +55 -55
- dtlpy/assets/service_runners/multi_method_item.py +52 -52
- dtlpy/assets/service_runners/multi_method_json.py +52 -52
- dtlpy/assets/service_runners/single_method.py +37 -37
- dtlpy/assets/service_runners/single_method_annotation.py +43 -43
- dtlpy/assets/service_runners/single_method_dataset.py +43 -43
- dtlpy/assets/service_runners/single_method_item.py +41 -41
- dtlpy/assets/service_runners/single_method_json.py +42 -42
- dtlpy/assets/service_runners/single_method_multi_input.py +45 -45
- dtlpy/assets/voc_annotation_template.xml +23 -23
- dtlpy/caches/base_cache.py +32 -32
- dtlpy/caches/cache.py +473 -473
- dtlpy/caches/dl_cache.py +201 -201
- dtlpy/caches/filesystem_cache.py +89 -89
- dtlpy/caches/redis_cache.py +84 -84
- dtlpy/dlp/__init__.py +20 -20
- dtlpy/dlp/cli_utilities.py +367 -367
- dtlpy/dlp/command_executor.py +764 -764
- dtlpy/dlp/dlp +1 -1
- dtlpy/dlp/dlp.bat +1 -1
- dtlpy/dlp/dlp.py +128 -128
- dtlpy/dlp/parser.py +651 -651
- dtlpy/entities/__init__.py +83 -83
- dtlpy/entities/analytic.py +311 -311
- dtlpy/entities/annotation.py +1879 -1879
- dtlpy/entities/annotation_collection.py +699 -699
- dtlpy/entities/annotation_definitions/__init__.py +20 -20
- dtlpy/entities/annotation_definitions/base_annotation_definition.py +100 -100
- dtlpy/entities/annotation_definitions/box.py +195 -195
- dtlpy/entities/annotation_definitions/classification.py +67 -67
- dtlpy/entities/annotation_definitions/comparison.py +72 -72
- dtlpy/entities/annotation_definitions/cube.py +204 -204
- dtlpy/entities/annotation_definitions/cube_3d.py +149 -149
- dtlpy/entities/annotation_definitions/description.py +32 -32
- dtlpy/entities/annotation_definitions/ellipse.py +124 -124
- dtlpy/entities/annotation_definitions/free_text.py +62 -62
- dtlpy/entities/annotation_definitions/gis.py +69 -69
- dtlpy/entities/annotation_definitions/note.py +139 -139
- dtlpy/entities/annotation_definitions/point.py +117 -117
- dtlpy/entities/annotation_definitions/polygon.py +182 -182
- dtlpy/entities/annotation_definitions/polyline.py +111 -111
- dtlpy/entities/annotation_definitions/pose.py +92 -92
- dtlpy/entities/annotation_definitions/ref_image.py +86 -86
- dtlpy/entities/annotation_definitions/segmentation.py +240 -240
- dtlpy/entities/annotation_definitions/subtitle.py +34 -34
- dtlpy/entities/annotation_definitions/text.py +85 -85
- dtlpy/entities/annotation_definitions/undefined_annotation.py +74 -74
- dtlpy/entities/app.py +220 -220
- dtlpy/entities/app_module.py +107 -107
- dtlpy/entities/artifact.py +174 -174
- dtlpy/entities/assignment.py +399 -399
- dtlpy/entities/base_entity.py +214 -214
- dtlpy/entities/bot.py +113 -113
- dtlpy/entities/codebase.py +296 -296
- dtlpy/entities/collection.py +38 -38
- dtlpy/entities/command.py +169 -169
- dtlpy/entities/compute.py +442 -442
- dtlpy/entities/dataset.py +1285 -1285
- dtlpy/entities/directory_tree.py +44 -44
- dtlpy/entities/dpk.py +470 -470
- dtlpy/entities/driver.py +222 -222
- dtlpy/entities/execution.py +397 -397
- dtlpy/entities/feature.py +124 -124
- dtlpy/entities/feature_set.py +145 -145
- dtlpy/entities/filters.py +641 -641
- dtlpy/entities/gis_item.py +107 -107
- dtlpy/entities/integration.py +184 -184
- dtlpy/entities/item.py +953 -953
- dtlpy/entities/label.py +123 -123
- dtlpy/entities/links.py +85 -85
- dtlpy/entities/message.py +175 -175
- dtlpy/entities/model.py +694 -691
- dtlpy/entities/node.py +1005 -1005
- dtlpy/entities/ontology.py +803 -803
- dtlpy/entities/organization.py +287 -287
- dtlpy/entities/package.py +657 -657
- dtlpy/entities/package_defaults.py +5 -5
- dtlpy/entities/package_function.py +185 -185
- dtlpy/entities/package_module.py +113 -113
- dtlpy/entities/package_slot.py +118 -118
- dtlpy/entities/paged_entities.py +290 -267
- dtlpy/entities/pipeline.py +593 -593
- dtlpy/entities/pipeline_execution.py +279 -279
- dtlpy/entities/project.py +394 -394
- dtlpy/entities/prompt_item.py +499 -499
- dtlpy/entities/recipe.py +301 -301
- dtlpy/entities/reflect_dict.py +102 -102
- dtlpy/entities/resource_execution.py +138 -138
- dtlpy/entities/service.py +958 -958
- dtlpy/entities/service_driver.py +117 -117
- dtlpy/entities/setting.py +294 -294
- dtlpy/entities/task.py +491 -491
- dtlpy/entities/time_series.py +143 -143
- dtlpy/entities/trigger.py +426 -426
- dtlpy/entities/user.py +118 -118
- dtlpy/entities/webhook.py +124 -124
- dtlpy/examples/__init__.py +19 -19
- dtlpy/examples/add_labels.py +135 -135
- dtlpy/examples/add_metadata_to_item.py +21 -21
- dtlpy/examples/annotate_items_using_model.py +65 -65
- dtlpy/examples/annotate_video_using_model_and_tracker.py +75 -75
- dtlpy/examples/annotations_convert_to_voc.py +9 -9
- dtlpy/examples/annotations_convert_to_yolo.py +9 -9
- dtlpy/examples/convert_annotation_types.py +51 -51
- dtlpy/examples/converter.py +143 -143
- dtlpy/examples/copy_annotations.py +22 -22
- dtlpy/examples/copy_folder.py +31 -31
- dtlpy/examples/create_annotations.py +51 -51
- dtlpy/examples/create_video_annotations.py +83 -83
- dtlpy/examples/delete_annotations.py +26 -26
- dtlpy/examples/filters.py +113 -113
- dtlpy/examples/move_item.py +23 -23
- dtlpy/examples/play_video_annotation.py +13 -13
- dtlpy/examples/show_item_and_mask.py +53 -53
- dtlpy/examples/triggers.py +49 -49
- dtlpy/examples/upload_batch_of_items.py +20 -20
- dtlpy/examples/upload_items_and_custom_format_annotations.py +55 -55
- dtlpy/examples/upload_items_with_modalities.py +43 -43
- dtlpy/examples/upload_segmentation_annotations_from_mask_image.py +44 -44
- dtlpy/examples/upload_yolo_format_annotations.py +70 -70
- dtlpy/exceptions.py +125 -125
- dtlpy/miscellaneous/__init__.py +20 -20
- dtlpy/miscellaneous/dict_differ.py +95 -95
- dtlpy/miscellaneous/git_utils.py +217 -217
- dtlpy/miscellaneous/json_utils.py +14 -14
- dtlpy/miscellaneous/list_print.py +105 -105
- dtlpy/miscellaneous/zipping.py +130 -130
- dtlpy/ml/__init__.py +20 -20
- dtlpy/ml/base_feature_extractor_adapter.py +27 -27
- dtlpy/ml/base_model_adapter.py +945 -940
- dtlpy/ml/metrics.py +461 -461
- dtlpy/ml/predictions_utils.py +274 -274
- dtlpy/ml/summary_writer.py +57 -57
- dtlpy/ml/train_utils.py +60 -60
- dtlpy/new_instance.py +252 -252
- dtlpy/repositories/__init__.py +56 -56
- dtlpy/repositories/analytics.py +85 -85
- dtlpy/repositories/annotations.py +916 -916
- dtlpy/repositories/apps.py +383 -383
- dtlpy/repositories/artifacts.py +452 -452
- dtlpy/repositories/assignments.py +599 -599
- dtlpy/repositories/bots.py +213 -213
- dtlpy/repositories/codebases.py +559 -559
- dtlpy/repositories/collections.py +332 -348
- dtlpy/repositories/commands.py +158 -158
- dtlpy/repositories/compositions.py +61 -61
- dtlpy/repositories/computes.py +434 -406
- dtlpy/repositories/datasets.py +1291 -1291
- dtlpy/repositories/downloader.py +895 -895
- dtlpy/repositories/dpks.py +433 -433
- dtlpy/repositories/drivers.py +266 -266
- dtlpy/repositories/executions.py +817 -817
- dtlpy/repositories/feature_sets.py +226 -226
- dtlpy/repositories/features.py +238 -238
- dtlpy/repositories/integrations.py +484 -484
- dtlpy/repositories/items.py +909 -915
- dtlpy/repositories/messages.py +94 -94
- dtlpy/repositories/models.py +877 -867
- dtlpy/repositories/nodes.py +80 -80
- dtlpy/repositories/ontologies.py +511 -511
- dtlpy/repositories/organizations.py +525 -525
- dtlpy/repositories/packages.py +1941 -1941
- dtlpy/repositories/pipeline_executions.py +448 -448
- dtlpy/repositories/pipelines.py +642 -642
- dtlpy/repositories/projects.py +539 -539
- dtlpy/repositories/recipes.py +399 -399
- dtlpy/repositories/resource_executions.py +137 -137
- dtlpy/repositories/schema.py +120 -120
- dtlpy/repositories/service_drivers.py +213 -213
- dtlpy/repositories/services.py +1704 -1704
- dtlpy/repositories/settings.py +339 -339
- dtlpy/repositories/tasks.py +1124 -1124
- dtlpy/repositories/times_series.py +278 -278
- dtlpy/repositories/triggers.py +536 -536
- dtlpy/repositories/upload_element.py +257 -257
- dtlpy/repositories/uploader.py +651 -651
- dtlpy/repositories/webhooks.py +249 -249
- dtlpy/services/__init__.py +22 -22
- dtlpy/services/aihttp_retry.py +131 -131
- dtlpy/services/api_client.py +1782 -1782
- dtlpy/services/api_reference.py +40 -40
- dtlpy/services/async_utils.py +133 -133
- dtlpy/services/calls_counter.py +44 -44
- dtlpy/services/check_sdk.py +68 -68
- dtlpy/services/cookie.py +115 -115
- dtlpy/services/create_logger.py +156 -156
- dtlpy/services/events.py +84 -84
- dtlpy/services/logins.py +235 -235
- dtlpy/services/reporter.py +256 -256
- dtlpy/services/service_defaults.py +91 -91
- dtlpy/utilities/__init__.py +20 -20
- dtlpy/utilities/annotations/__init__.py +16 -16
- dtlpy/utilities/annotations/annotation_converters.py +269 -269
- dtlpy/utilities/base_package_runner.py +264 -264
- dtlpy/utilities/converter.py +1650 -1650
- dtlpy/utilities/dataset_generators/__init__.py +1 -1
- dtlpy/utilities/dataset_generators/dataset_generator.py +670 -670
- dtlpy/utilities/dataset_generators/dataset_generator_tensorflow.py +23 -23
- dtlpy/utilities/dataset_generators/dataset_generator_torch.py +21 -21
- dtlpy/utilities/local_development/__init__.py +1 -1
- dtlpy/utilities/local_development/local_session.py +179 -179
- dtlpy/utilities/reports/__init__.py +2 -2
- dtlpy/utilities/reports/figures.py +343 -343
- dtlpy/utilities/reports/report.py +71 -71
- dtlpy/utilities/videos/__init__.py +17 -17
- dtlpy/utilities/videos/video_player.py +598 -598
- dtlpy/utilities/videos/videos.py +470 -470
- {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp +1 -1
- dtlpy-1.114.13.data/scripts/dlp.bat +2 -0
- {dtlpy-1.113.10.data → dtlpy-1.114.13.data}/scripts/dlp.py +128 -128
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/LICENSE +200 -200
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/METADATA +172 -172
- dtlpy-1.114.13.dist-info/RECORD +240 -0
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/WHEEL +1 -1
- tests/features/environment.py +551 -550
- dtlpy-1.113.10.data/scripts/dlp.bat +0 -2
- dtlpy-1.113.10.dist-info/RECORD +0 -244
- tests/assets/__init__.py +0 -0
- tests/assets/models_flow/__init__.py +0 -0
- tests/assets/models_flow/failedmain.py +0 -52
- tests/assets/models_flow/main.py +0 -62
- tests/assets/models_flow/main_model.py +0 -54
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/entry_points.txt +0 -0
- {dtlpy-1.113.10.dist-info → dtlpy-1.114.13.dist-info}/top_level.txt +0 -0
dtlpy/ml/metrics.py
CHANGED
@@ -1,461 +1,461 @@

The diff marks every line of the file as removed and re-added (461 lines on each side), but the removed and added sides are line-for-line identical, so the file contents are listed once below.

import numpy as np
import pandas as pd
import logging
import datetime

from .. import entities

logger = logging.getLogger(name='dtlpy')


class Results:
    def __init__(self, matches, annotation_type):
        self.matches = matches
        self.annotation_type = annotation_type

    def to_df(self):
        return self.matches.to_df()

    def summary(self):
        df = self.matches.to_df()
        total_set_one = len(df['first_id'].dropna())
        total_set_two = len(df['second_id'].dropna())
        # each set unmatched is the number of Nones from the other set
        unmatched_set_one = df.shape[0] - total_set_two
        unmatched_set_two = df.shape[0] - total_set_one
        matched_set_one = total_set_one - unmatched_set_one
        matched_set_two = total_set_two - unmatched_set_two
        # sanity
        assert matched_set_one == matched_set_two, 'matched numbers are not the same'
        assert df['annotation_score'].shape[0] == (unmatched_set_one + unmatched_set_two + matched_set_one), \
            'mis-match number if scores and annotations'
        return {
            'annotation_type': self.annotation_type,
            'mean_annotations_scores': df['annotation_score'].mean(),
            'mean_attributes_scores': df['attribute_score'].mean(),
            'mean_labels_scores': df['label_score'].mean(),
            'n_annotations_set_one': total_set_one,
            'n_annotations_set_two': total_set_two,
            'n_annotations_total': total_set_one + total_set_two,
            'n_annotations_unmatched_set_one': unmatched_set_one,
            'n_annotations_unmatched_set_two': unmatched_set_two,
            'n_annotations_unmatched_total': unmatched_set_one + unmatched_set_two,
            'n_annotations_matched_total': matched_set_one,
            'precision': matched_set_one / (matched_set_one + unmatched_set_two),
            'recall': matched_set_one / (matched_set_one + unmatched_set_one)
        }


class Match:
    def __init__(self,
                 first_annotation_id, first_annotation_label, first_annotation_confidence,
                 second_annotation_id, second_annotation_label, second_annotation_confidence,
                 # defaults
                 annotation_score=0, attributes_score=0, geometry_score=0, label_score=0):
        """
        Save a match between two annotations with all relevant scores

        :param first_annotation_id:
        :param second_annotation_id:
        :param annotation_score:
        :param attributes_score:
        :param geometry_score:
        :param label_score:
        """
        self.first_annotation_id = first_annotation_id
        self.first_annotation_label = first_annotation_label
        self.first_annotation_confidence = first_annotation_confidence
        self.second_annotation_id = second_annotation_id
        self.second_annotation_label = second_annotation_label
        self.second_annotation_confidence = second_annotation_confidence
        self.annotation_score = annotation_score
        self.attributes_score = attributes_score
        # Replace the old annotation score
        self.geometry_score = geometry_score
        self.label_score = label_score

    def __repr__(self):
        return 'annotation: {:.2f}, attributes: {:.2f}, geometry: {:.2f}, label: {:.2f}'.format(
            self.annotation_score, self.attributes_score, self.geometry_score, self.label_score)


class Matches:
    def __init__(self):
        self.matches = list()
        self._annotations_raw_df = list()

    def __len__(self):
        return len(self.matches)

    def __repr__(self):
        return self.to_df().to_string()

    def to_df(self):
        results = list()
        for match in self.matches:
            results.append({
                'first_id': match.first_annotation_id,
                'first_label': match.first_annotation_label,
                'first_confidence': match.first_annotation_confidence,
                'second_id': match.second_annotation_id,
                'second_label': match.second_annotation_label,
                'second_confidence': match.second_annotation_confidence,
                'annotation_score': match.annotation_score,
                'attribute_score': match.attributes_score,
                'geometry_score': match.geometry_score,
                'label_score': match.label_score,
            })
        df = pd.DataFrame(results)
        return df

    def add(self, match: Match):
        self.matches.append(match)

    def validate(self):
        first = list()
        second = list()
        for match in self.matches:
            if match.first_annotation_id in first:
                raise ValueError('duplication for annotation id {!r} in FIRST set'.format(match.first_annotation_id))
            if match.first_annotation_id is not None:
                first.append(match.first_annotation_id)
            if match.second_annotation_id in second:
                raise ValueError('duplication for annotation id {!r} in SECOND set'.format(match.second_annotation_id))
            if match.second_annotation_id is not None:
                second.append(match.second_annotation_id)
        return True

    def find(self, annotation_id, loc='first'):
        for match in self.matches:
            if loc == 'first':
                if match.first_annotation_id == annotation_id:
                    return match
            elif loc == 'second':
                if match.second_annotation_id == annotation_id:
                    return match
        raise ValueError('could not find annotation id {!r} in {}'.format(annotation_id, loc))


######################
# Matching functions #
######################
class Matchers:

    @staticmethod
    def calculate_iou_box(pts1, pts2, config):
        """
        Measure the two list of points IoU
        :param pts1: ann.geo coordinates
        :param pts2: ann.geo coordinates
        :return: `float` how Intersection over Union of tho shapes
        """
        try:
            from shapely.geometry import Polygon
        except (ImportError, ModuleNotFoundError) as err:
            raise RuntimeError('dtlpy depends on external package. Please install ') from err
        if len(pts1) == 2:
            # regular box annotation (2 pts)
            pt1_left_top = [pts1[0][0], pts1[0][1]]
            pt1_right_top = [pts1[0][0], pts1[1][1]]
            pt1_right_bottom = [pts1[1][0], pts1[1][1]]
            pt1_left_bottom = [pts1[1][0], pts1[0][1]]
        else:
            # rotated box annotation (4 pts)
            pt1_left_top = pts1[0]
            pt1_right_top = pts1[3]
            pt1_left_bottom = pts1[1]
            pt1_right_bottom = pts1[2]

        poly_1 = Polygon([pt1_left_top,
                          pt1_right_top,
                          pt1_right_bottom,
                          pt1_left_bottom])

        if len(pts2) == 2:
            # regular box annotation (2 pts)
            pt2_left_top = [pts2[0][0], pts2[0][1]]
            pt2_right_top = [pts2[0][0], pts2[1][1]]
            pt2_right_bottom = [pts2[1][0], pts2[1][1]]
            pt2_left_bottom = [pts2[1][0], pts2[0][1]]
        else:
            # rotated box annotation (4 pts)
            pt2_left_top = pts2[0]
            pt2_right_top = pts2[3]
            pt2_left_bottom = pts2[1]
            pt2_right_bottom = pts2[2]

        poly_2 = Polygon([pt2_left_top,
                          pt2_right_top,
                          pt2_right_bottom,
                          pt2_left_bottom])
        iou = poly_1.intersection(poly_2).area / poly_1.union(poly_2).area
        return iou

    @staticmethod
    def calculate_iou_classification(pts1, pts2, config):
        """
        Measure the two list of points IoU
        :param pts1: ann.geo coordinates
        :param pts2: ann.geo coordinates
        :return: `float` how Intersection over Union of tho shapes
        """
        return 1

    @staticmethod
    def calculate_iou_polygon(pts1, pts2, config):
        try:
            # from shapely.geometry import Polygon
            import cv2
        except (ImportError, ModuleNotFoundError) as err:
            raise RuntimeError('dtlpy depends on external package. Please install ') from err
        # # using shapley
        # poly_1 = Polygon(pts1)
        # poly_2 = Polygon(pts2)
        # iou = poly_1.intersection(poly_2).area / poly_1.union(poly_2).area

        # # using opencv
        width = int(np.ceil(np.max(np.concatenate((pts1[:, 0], pts2[:, 0]))))) + 10
        height = int(np.ceil(np.max(np.concatenate((pts1[:, 1], pts2[:, 1]))))) + 10
        mask1 = np.zeros((height, width))
        mask2 = np.zeros((height, width))
        mask1 = cv2.drawContours(
            image=mask1,
            contours=[pts1.round().astype(int)],
            contourIdx=-1,
            color=1,
            thickness=-1,
        )
        mask2 = cv2.drawContours(
            image=mask2,
            contours=[pts2.round().astype(int)],
            contourIdx=-1,
            color=1,
            thickness=-1,
        )
        iou = np.sum((mask1 + mask2) == 2) / np.sum((mask1 + mask2) > 0)
        if np.sum((mask1 + mask2) > 2):
            assert False
        return iou

    @staticmethod
    def calculate_iou_semantic(mask1, mask2, config):
        joint_mask = mask1 + mask2
        return np.sum(np.sum(joint_mask == 2) / np.sum(joint_mask > 0))

    @staticmethod
    def calculate_iou_point(pt1, pt2, config):
        """
        pt is [x,y]
        normalizing to score between [0, 1] -> 1 is the exact match
        if same point score is 1
        at about 20 pix distance score is about 0.5, 100 goes to 0
        :param pt1:
        :param pt2:
        :return:
        """
        """
        x = np.arange(int(diag))
        y = np.exp(-1 / diag * 20 * x)
        plt.figure()
        plt.plot(x, y)
        """
        height = config.get('height', 500)
        width = config.get('width', 500)
        diag = np.sqrt(height ** 2 + width ** 2)
        # 20% of the image diagonal tolerance (empirically). need to
        return np.exp(-1 / diag * 20 * np.linalg.norm(np.asarray(pt1) - np.asarray(pt2)))

    @staticmethod
    def match_attributes(attributes1, attributes2):
        """
        Returns IoU of the attributes. if both are empty - its a prefect match (returns 1)
        0: no matching
        1: perfect attributes match
        """
        if type(attributes1) is not type(attributes2):
            logger.warning('attributes are not same type: {}, {}'.format(type(attributes1), type(attributes2)))
            return 0

        if attributes1 is None and attributes2 is None:
            return 1

        if isinstance(attributes1, dict) and isinstance(attributes2, dict):
            # convert to list
            attributes1 = ['{}-{}'.format(key, val) for key, val in attributes1.items()]
            attributes2 = ['{}-{}'.format(key, val) for key, val in attributes2.items()]

        intersection = set(attributes1).intersection(set(attributes2))
        union = set(attributes1).union(attributes2)
        if len(union) == 0:
            # if there is no union - there are no attributes at all
            return 1
        return len(intersection) / len(union)

    @staticmethod
    def match_labels(label1, label2):
        """
        Returns 1 in one of the labels in substring of the second
        """
        return int(label1 in label2 or label2 in label1)

    @staticmethod
    def general_match(matches: Matches,
                      first_set: entities.AnnotationCollection,
                      second_set: entities.AnnotationCollection,
                      match_type,
                      match_threshold: float,
                      ignore_attributes=False,
                      ignore_labels=False):
        """

        :param matches:
        :param first_set:
        :param second_set:
        :param match_type:
        :param match_threshold:
        :param ignore_attributes:
        :param ignore_labels:
        :return:
        """
        annotation_type_to_func = {
            entities.AnnotationType.BOX: Matchers.calculate_iou_box,
            entities.AnnotationType.CLASSIFICATION: Matchers.calculate_iou_classification,
            entities.AnnotationType.SEGMENTATION: Matchers.calculate_iou_semantic,
            entities.AnnotationType.POLYGON: Matchers.calculate_iou_polygon,
            entities.AnnotationType.POINT: Matchers.calculate_iou_point,
        }
        df = pd.DataFrame(data=-1 * np.ones((len(second_set), len(first_set))),
                          columns=[a.id for a in first_set],
                          index=[a.id for a in second_set])
        for annotation_one in first_set:
            for annotation_two in second_set:
                if match_type not in annotation_type_to_func:
                    raise ValueError('unsupported type: {}'.format(match_type))
                if df[annotation_one.id][annotation_two.id] == -1:
                    try:
                        config = {'height': annotation_one._item.height if annotation_one._item is not None else 500,
                                  'width': annotation_one._item.width if annotation_one._item is not None else 500}
                        df[annotation_one.id][annotation_two.id] = annotation_type_to_func[match_type](
                            annotation_one.geo,
                            annotation_two.geo,
                            config)
                    except ZeroDivisionError:
                        logger.warning(
                            'Found annotations with area=0!: annotations ids: {!r}, {!r}'.format(annotation_one.id,
                                                                                                 annotation_two.id))
                        df[annotation_one.id][annotation_two.id] = 0
        # for debug - save the annotations scoring matrix
        matches._annotations_raw_df.append(df.copy())

        # go over all matches
        while True:
            # take max IoU score, list the match and remove annotations' ids from columns and rows
            # keep doing that until no more matches or lower than match threshold
            max_cell = df.max().max()
            if max_cell < match_threshold or np.isnan(max_cell):
                break
            row_index, col_index = np.where(df == max_cell)
            row_index = row_index[0]
            col_index = col_index[0]
            first_annotation_id = df.columns[col_index]
            second_annotation_id = df.index[row_index]
            first_annotation = [a for a in first_set if a.id == first_annotation_id][0]
            second_annotation = [a for a in second_set if a.id == second_annotation_id][0]
            geometry_score = df.iloc[row_index, col_index]
            labels_score = Matchers.match_labels(label1=first_annotation.label,
                                                 label2=second_annotation.label)
            attribute_score = Matchers.match_attributes(attributes1=first_annotation.attributes,
                                                        attributes2=second_annotation.attributes)

            # TODO use ignores for final score
            annotation_score = (geometry_score + attribute_score + labels_score) / 3
            matches.add(Match(first_annotation_id=first_annotation_id,
                              first_annotation_label=first_annotation.label,
                              first_annotation_confidence=
                              first_annotation.metadata.get('user', dict()).get('model', dict()).get('confidence', 1),
                              second_annotation_id=second_annotation_id,
                              second_annotation_label=second_annotation.label,
                              second_annotation_confidence=
                              second_annotation.metadata.get('user', dict()).get('model', dict()).get('confidence', 1),
                              geometry_score=geometry_score,
                              annotation_score=annotation_score,
                              label_score=labels_score,
                              attributes_score=attribute_score))
            df.drop(index=second_annotation_id, inplace=True)
            df.drop(columns=first_annotation_id, inplace=True)
        # add un-matched
        for second_id in df.index:
            second_annotation = [a for a in second_set if a.id == second_id][0]
            matches.add(match=Match(first_annotation_id=None,
                                    first_annotation_label=None,
                                    first_annotation_confidence=None,
                                    second_annotation_id=second_id,
                                    second_annotation_label=second_annotation.label,
                                    second_annotation_confidence=
                                    second_annotation.metadata.get('user', dict()).get('model', dict()).get(
                                        'confidence', 1),
                                    ))
        for first_id in df.columns:
            first_annotation = [a for a in first_set if a.id == first_id][0]
            matches.add(match=Match(first_annotation_id=first_id,
                                    first_annotation_label=first_annotation.label,
                                    first_annotation_confidence=
                                    first_annotation.metadata.get('user', dict()).get('model', dict()).get('confidence',
                                                                                                           1),
                                    second_annotation_id=None,
                                    second_annotation_label=None,
                                    second_annotation_confidence=None))
        return matches


def item_annotation_duration(item: entities.Item = None,
                             dataset: entities.Dataset = None,
                             project: entities.Project = None,
                             task: entities.Task = None,
                             assignment: entities.Assignment = None):
    if all(ent is None for ent in [item, dataset, project, assignment, task]):
        raise ValueError('At least one input to annotation duration must not be None')
    query = {
        "startTime": 0,
        "context": {
            "accountId": [],
            "orgId": [],
            "projectId": [],
            "datasetId": [],
            "taskId": [],
            "assignmentId": [],
            "itemId": [],
            "userId": [],
            "serviceId": [],
            "podId": [],
        },
        "measures": [
            {
                "measureType": "itemAnnotationDuration",
                "pageSize": 1000,
                "page": 0,
            },
        ]
    }
    # add context for analytics
    created_at = list()
    if item is not None:
        query['context']['itemId'].append(item.id)
        created_at.append(int(1000 * datetime.datetime.fromisoformat(item.created_at[:-1]).timestamp()))
    if task is not None:
        query['context']['taskId'].append(task.id)
        created_at.append(int(1000 * datetime.datetime.fromisoformat(task.created_at[:-1]).timestamp()))
    if dataset is not None:
        query['context']['datasetId'].append(dataset.id)
        created_at.append(int(1000 * datetime.datetime.fromisoformat(dataset.created_at[:-1]).timestamp()))
    if assignment is not None:
        query['context']['assignmentId'].append(assignment.id)
        # assignment doesnt have "created_at" attribute
    query['startTime'] = int(np.min(created_at))
    raw = project.analytics.get_samples(query=query, return_field=None, return_raw=True)
    res = {row['itemId']: row['duration'] for row in raw[0]['response']}
    if item.id not in res:
        total_time_s = 0
    else:
        total_time_s = res[item.id] / 1000
    return total_time_s
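
For context on how this module is typically driven, here is a minimal usage sketch based only on the signatures listed above. It is not taken from the package documentation: `ann_set_a` and `ann_set_b` are hypothetical placeholders for `entities.AnnotationCollection` objects, and the import path assumes the file is exposed as `dtlpy.ml.metrics`.

```python
from dtlpy import entities
from dtlpy.ml import metrics  # assumed import path for the module shown above


def compare_annotation_sets(ann_set_a, ann_set_b):
    """
    Hypothetical helper: ann_set_a and ann_set_b are entities.AnnotationCollection
    objects, e.g. ground truth vs. model predictions for the same item.
    """
    matches = metrics.Matchers.general_match(
        matches=metrics.Matches(),
        first_set=ann_set_a,
        second_set=ann_set_b,
        match_type=entities.AnnotationType.BOX,  # picks the box IoU matcher from annotation_type_to_func
        match_threshold=0.5,                     # minimum IoU required to pair two annotations
    )
    results = metrics.Results(matches=matches, annotation_type=entities.AnnotationType.BOX)
    return results.summary()  # dict with precision, recall and mean per-score values
```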