supervisely-6.73.417-py3-none-any.whl → supervisely-6.73.419-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. supervisely/api/entity_annotation/figure_api.py +89 -45
  2. supervisely/nn/inference/inference.py +61 -45
  3. supervisely/nn/inference/instance_segmentation/instance_segmentation.py +1 -0
  4. supervisely/nn/inference/object_detection/object_detection.py +1 -0
  5. supervisely/nn/inference/session.py +4 -4
  6. supervisely/nn/model/model_api.py +31 -20
  7. supervisely/nn/model/prediction.py +11 -0
  8. supervisely/nn/model/prediction_session.py +33 -6
  9. supervisely/nn/tracker/__init__.py +1 -2
  10. supervisely/nn/tracker/base_tracker.py +44 -0
  11. supervisely/nn/tracker/botsort/__init__.py +1 -0
  12. supervisely/nn/tracker/botsort/botsort_config.yaml +31 -0
  13. supervisely/nn/tracker/botsort/osnet_reid/osnet.py +566 -0
  14. supervisely/nn/tracker/botsort/osnet_reid/osnet_reid_interface.py +88 -0
  15. supervisely/nn/tracker/botsort/tracker/__init__.py +0 -0
  16. supervisely/nn/tracker/{bot_sort → botsort/tracker}/basetrack.py +1 -2
  17. supervisely/nn/tracker/{utils → botsort/tracker}/gmc.py +51 -59
  18. supervisely/nn/tracker/{deep_sort/deep_sort → botsort/tracker}/kalman_filter.py +71 -33
  19. supervisely/nn/tracker/botsort/tracker/matching.py +202 -0
  20. supervisely/nn/tracker/{bot_sort/bot_sort.py → botsort/tracker/mc_bot_sort.py} +68 -81
  21. supervisely/nn/tracker/botsort_tracker.py +259 -0
  22. supervisely/project/project.py +1 -1
  23. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/METADATA +5 -3
  24. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/RECORD +29 -42
  25. supervisely/nn/tracker/bot_sort/__init__.py +0 -21
  26. supervisely/nn/tracker/bot_sort/fast_reid_interface.py +0 -152
  27. supervisely/nn/tracker/bot_sort/matching.py +0 -127
  28. supervisely/nn/tracker/bot_sort/sly_tracker.py +0 -401
  29. supervisely/nn/tracker/deep_sort/__init__.py +0 -6
  30. supervisely/nn/tracker/deep_sort/deep_sort/__init__.py +0 -1
  31. supervisely/nn/tracker/deep_sort/deep_sort/detection.py +0 -49
  32. supervisely/nn/tracker/deep_sort/deep_sort/iou_matching.py +0 -81
  33. supervisely/nn/tracker/deep_sort/deep_sort/linear_assignment.py +0 -202
  34. supervisely/nn/tracker/deep_sort/deep_sort/nn_matching.py +0 -176
  35. supervisely/nn/tracker/deep_sort/deep_sort/track.py +0 -166
  36. supervisely/nn/tracker/deep_sort/deep_sort/tracker.py +0 -145
  37. supervisely/nn/tracker/deep_sort/deep_sort.py +0 -301
  38. supervisely/nn/tracker/deep_sort/generate_clip_detections.py +0 -90
  39. supervisely/nn/tracker/deep_sort/preprocessing.py +0 -70
  40. supervisely/nn/tracker/deep_sort/sly_tracker.py +0 -273
  41. supervisely/nn/tracker/tracker.py +0 -285
  42. supervisely/nn/tracker/utils/kalman_filter.py +0 -492
  43. supervisely/nn/tracking/__init__.py +0 -1
  44. supervisely/nn/tracking/boxmot.py +0 -114
  45. supervisely/nn/tracking/tracking.py +0 -24
  46. /supervisely/nn/tracker/{utils → botsort/osnet_reid}/__init__.py +0 -0
  47. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/LICENSE +0 -0
  48. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/WHEEL +0 -0
  49. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/entry_points.txt +0 -0
  50. {supervisely-6.73.417.dist-info → supervisely-6.73.419.dist-info}/top_level.txt +0 -0
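
The listing above amounts to a consolidation of the tracking code: the old `bot_sort`, `deep_sort`, and `supervisely.nn.tracking` modules are removed, and a single `botsort` package (tracker core, OSNet ReID, YAML config) plus `base_tracker.py`/`botsort_tracker.py` takes their place. The sketch below only illustrates the new internal import paths visible in this listing and in the hunks that follow; the public entry point exposed by `botsort_tracker.py` is not shown in this diff.

# Illustrative only: paths taken from the file listing above; the class exported by
# botsort_tracker.py (and how base_tracker.py is meant to be used) is not part of this diff.
from supervisely.nn.tracker.botsort.tracker import matching                    # new association utilities
from supervisely.nn.tracker.botsort.tracker.mc_bot_sort import BoTSORT         # moved from bot_sort/bot_sort.py
from supervisely.nn.tracker.botsort.osnet_reid.osnet_reid_interface import OsnetReIDInterface  # replaces FastReID
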
supervisely/nn/tracker/botsort/tracker/matching.py (new file)
@@ -0,0 +1,202 @@
+ import numpy as np
+ from supervisely import logger
+
+ try:
+     import scipy
+     import lap
+     from scipy.spatial.distance import cdist
+
+     from cython_bbox import bbox_overlaps as bbox_ious
+ except ImportError:
+     logger.warning("scipy, lap, and cython_bbox are not installed, some tracker functionalities may not work.")
+
+ from . import kalman_filter
+
+ def merge_matches(m1, m2, shape):
+     O,P,Q = shape
+     m1 = np.asarray(m1)
+     m2 = np.asarray(m2)
+
+     M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P))
+     M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q))
+
+     mask = M1*M2
+     match = mask.nonzero()
+     match = list(zip(match[0], match[1]))
+     unmatched_O = tuple(set(range(O)) - set([i for i, j in match]))
+     unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match]))
+
+     return match, unmatched_O, unmatched_Q
+
+
+ def _indices_to_matches(cost_matrix, indices, thresh):
+     matched_cost = cost_matrix[tuple(zip(*indices))]
+     matched_mask = (matched_cost <= thresh)
+
+     matches = indices[matched_mask]
+     unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0]))
+     unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1]))
+
+     return matches, unmatched_a, unmatched_b
+
+
+ def linear_assignment(cost_matrix, thresh):
+     if cost_matrix.size == 0:
+         return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1]))
+     matches, unmatched_a, unmatched_b = [], [], []
+     cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
+     for ix, mx in enumerate(x):
+         if mx >= 0:
+             matches.append([ix, mx])
+     unmatched_a = np.where(x < 0)[0]
+     unmatched_b = np.where(y < 0)[0]
+     matches = np.asarray(matches)
+     return matches, unmatched_a, unmatched_b
+
+
+ def ious(atlbrs, btlbrs):
+     """
+     Compute cost based on IoU
+     :type atlbrs: list[tlbr] | np.ndarray
+     :type btlbrs: list[tlbr] | np.ndarray
+
+     :rtype ious np.ndarray
+     """
+     ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=float)
+     if ious.size == 0:
+         return ious
+
+     ious = bbox_ious(
+         np.ascontiguousarray(atlbrs, dtype=float),
+         np.ascontiguousarray(btlbrs, dtype=float)
+     )
+
+     return ious
+
+
+ def tlbr_expand(tlbr, scale=1.2):
+     w = tlbr[2] - tlbr[0]
+     h = tlbr[3] - tlbr[1]
+
+     half_scale = 0.5 * scale
+
+     tlbr[0] -= half_scale * w
+     tlbr[1] -= half_scale * h
+     tlbr[2] += half_scale * w
+     tlbr[3] += half_scale * h
+
+     return tlbr
+
+
+ def iou_distance(atracks, btracks):
+     """
+     Compute cost based on IoU
+     :type atracks: list[STrack]
+     :type btracks: list[STrack]
+
+     :rtype cost_matrix np.ndarray
+     """
+
+     if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)):
+         atlbrs = atracks
+         btlbrs = btracks
+     else:
+         atlbrs = [track.tlbr for track in atracks]
+         btlbrs = [track.tlbr for track in btracks]
+     _ious = ious(atlbrs, btlbrs)
+     cost_matrix = 1 - _ious
+
+     return cost_matrix
+
+
+ def v_iou_distance(atracks, btracks):
+     """
+     Compute cost based on IoU
+     :type atracks: list[STrack]
+     :type btracks: list[STrack]
+
+     :rtype cost_matrix np.ndarray
+     """
+
+     if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)):
+         atlbrs = atracks
+         btlbrs = btracks
+     else:
+         atlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in atracks]
+         btlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in btracks]
+     _ious = ious(atlbrs, btlbrs)
+     cost_matrix = 1 - _ious
+
+     return cost_matrix
+
+
+ def embedding_distance(tracks, detections, metric='cosine'):
+     """
+     :param tracks: list[STrack]
+     :param detections: list[BaseTrack]
+     :param metric:
+     :return: cost_matrix np.ndarray
+     """
+
+     cost_matrix = np.zeros((len(tracks), len(detections)), dtype=float)
+     if cost_matrix.size == 0:
+         return cost_matrix
+     det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+     track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+
+     cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # / 2.0 # Normalized features
+     return cost_matrix
+
+
+ def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False):
+     if cost_matrix.size == 0:
+         return cost_matrix
+     gating_dim = 2 if only_position else 4
+     gating_threshold = kalman_filter.chi2inv95[gating_dim]
+     # measurements = np.asarray([det.to_xyah() for det in detections])
+     measurements = np.asarray([det.to_xywh() for det in detections])
+     for row, track in enumerate(tracks):
+         gating_distance = kf.gating_distance(
+             track.mean, track.covariance, measurements, only_position)
+         cost_matrix[row, gating_distance > gating_threshold] = np.inf
+     return cost_matrix
+
+
+ def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
+     if cost_matrix.size == 0:
+         return cost_matrix
+     gating_dim = 2 if only_position else 4
+     gating_threshold = kalman_filter.chi2inv95[gating_dim]
+     # measurements = np.asarray([det.to_xyah() for det in detections])
+     measurements = np.asarray([det.to_xywh() for det in detections])
+     for row, track in enumerate(tracks):
+         gating_distance = kf.gating_distance(
+             track.mean, track.covariance, measurements, only_position, metric='maha')
+         cost_matrix[row, gating_distance > gating_threshold] = np.inf
+         cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance
+     return cost_matrix
+
+
+ def fuse_iou(cost_matrix, tracks, detections):
+     if cost_matrix.size == 0:
+         return cost_matrix
+     reid_sim = 1 - cost_matrix
+     iou_dist = iou_distance(tracks, detections)
+     iou_sim = 1 - iou_dist
+     fuse_sim = reid_sim * (1 + iou_sim) / 2
+     det_scores = np.array([det.score for det in detections])
+     det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
+     # fuse_sim = fuse_sim * (1 + det_scores) / 2
+     fuse_cost = 1 - fuse_sim
+     return fuse_cost
+
+
+ def fuse_score(cost_matrix, detections):
+     if cost_matrix.size == 0:
+         return cost_matrix
+     iou_sim = 1 - cost_matrix
+     det_scores = np.array([det.score for det in detections])
+     det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
+     fuse_sim = iou_sim * det_scores
+     fuse_cost = 1 - fuse_sim
+     return fuse_cost
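
The new `matching.py` mirrors the association utilities of the upstream BoT-SORT tracker: costs are built as `1 - IoU` (optionally fused with detection scores or ReID embedding distances) and solved with `lap.lapjv`. A minimal usage sketch of the two core calls, using made-up `tlbr` boxes; it assumes `lap` and `cython_bbox` are installed, per the guarded import above.

# Sketch, not part of the diff: boxes are illustrative.
import numpy as np
from supervisely.nn.tracker.botsort.tracker import matching

track_boxes = np.array([[10, 10, 50, 50], [60, 60, 100, 100]], dtype=float)  # predicted track boxes (tlbr)
det_boxes = np.array([[12, 11, 52, 49], [200, 200, 240, 240]], dtype=float)  # current detections (tlbr)

cost = matching.iou_distance(track_boxes, det_boxes)                  # cost matrix = 1 - IoU
matches, u_tracks, u_dets = matching.linear_assignment(cost, thresh=0.8)
# matches -> [[0, 0]]: track 0 keeps detection 0; track 1 and detection 1 stay unmatched
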
supervisely/nn/tracker/{bot_sort/bot_sort.py → botsort/tracker/mc_bot_sort.py}
@@ -1,16 +1,16 @@
- from collections import deque
-
  import numpy as np
-
- from supervisely.nn.tracker.utils.gmc import GMC
- from supervisely.nn.tracker.utils.kalman_filter import KalmanFilterXYWH as KalmanFilter
+ from collections import deque

  from . import matching
+ from .gmc import GMC
  from .basetrack import BaseTrack, TrackState
- from .fast_reid_interface import FastReIDInterface
+ from .kalman_filter import KalmanFilter
+
+ from supervisely.nn.tracker.botsort.osnet_reid.osnet_reid_interface import OsnetReIDInterface


  class STrack(BaseTrack):
+
      shared_kalman = KalmanFilter()

      def __init__(self, tlwh, score, cls, feat=None, feat_history=50):
@@ -81,9 +81,7 @@ class STrack(BaseTrack):
                  if st.state != TrackState.Tracked:
                      multi_mean[i][6] = 0
                      multi_mean[i][7] = 0
-             multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(
-                 multi_mean, multi_covariance
-             )
+             multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
              for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
                  stracks[i].mean = mean
                  stracks[i].covariance = cov
@@ -122,9 +120,7 @@ class STrack(BaseTrack):

      def re_activate(self, new_track, frame_id, new_id=False):

-         self.mean, self.covariance = self.kalman_filter.update(
-             self.mean, self.covariance, self.tlwh_to_xywh(new_track.tlwh)
-         )
+         self.mean, self.covariance = self.kalman_filter.update(self.mean, self.covariance, self.tlwh_to_xywh(new_track.tlwh))
          if new_track.curr_feat is not None:
              self.update_features(new_track.curr_feat)
          self.tracklet_len = 0
@@ -150,9 +146,7 @@ class STrack(BaseTrack):

          new_tlwh = new_track.tlwh

-         self.mean, self.covariance = self.kalman_filter.update(
-             self.mean, self.covariance, self.tlwh_to_xywh(new_tlwh)
-         )
+         self.mean, self.covariance = self.kalman_filter.update(self.mean, self.covariance, self.tlwh_to_xywh(new_tlwh))

          if new_track.curr_feat is not None:
              self.update_features(new_track.curr_feat)
@@ -166,7 +160,7 @@ class STrack(BaseTrack):
      @property
      def tlwh(self):
          """Get current position in bounding box format `(top left x, top left y,
-             width, height)`.
+         width, height)`.
          """
          if self.mean is None:
              return self._tlwh.copy()
@@ -227,7 +221,7 @@ class STrack(BaseTrack):
          return ret

      def __repr__(self):
-         return "OT_{}_({}-{})".format(self.track_id, self.start_frame, self.end_frame)
+         return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame)


  class BoTSORT(object):
@@ -254,11 +248,9 @@ class BoTSORT(object):
          self.appearance_thresh = args.appearance_thresh

          if args.with_reid:
-             self.encoder = FastReIDInterface(
-                 args.fast_reid_config, args.fast_reid_weights, args.device
-             )
+             self.encoder = OsnetReIDInterface(args.reid_weights, args.device, args.fp16)

-         self.gmc = GMC(method=args.cmc_method, gmc_file=args.gmc_config)
+         self.gmc = GMC(method=args.cmc_method, verbose=[args.name, args.ablation])

      def update(self, output_results, img):
          self.frame_id += 1
@@ -267,6 +259,14 @@ class BoTSORT(object):
          lost_stracks = []
          removed_stracks = []

+         # mapping detection to track - INITIALIZE det_id immediately
+         detection_track_map = []
+         for i in range(len(output_results)):
+             detection_track_map.append({
+                 "det_id": i,  # Detection index - fill immediately
+                 "track_id": None  # track_id remains None until track assignment
+             })
+
          if len(output_results):
              bboxes = output_results[:, :4]
              scores = output_results[:, 4]
@@ -279,6 +279,7 @@ class BoTSORT(object):
              scores = scores[lowest_inds]
              classes = classes[lowest_inds]
              features = output_results[lowest_inds]
+             low_inds_map = lowest_inds.nonzero()[0]  # Mapping to original indices

              # Find high threshold detections
              remain_inds = scores > self.args.track_high_thresh
@@ -286,6 +287,7 @@ class BoTSORT(object):
              scores_keep = scores[remain_inds]
              classes_keep = classes[remain_inds]
              features_keep = features[remain_inds]
+             high_inds_map = low_inds_map[remain_inds]  # Mapping to original indices
          else:
              bboxes = []
              scores = []
@@ -293,86 +295,69 @@ class BoTSORT(object):
              dets = []
              scores_keep = []
              classes_keep = []
+             high_inds_map = []

-         """Extract embeddings """
-         if self.args.with_reid:
+         '''Extract embeddings '''
+         if self.args.with_reid and len(dets) > 0:
              features_keep = self.encoder.inference(img, dets)

          if len(dets) > 0:
-             """Detections"""
              if self.args.with_reid:
-                 detections = [
-                     STrack(STrack.tlbr_to_tlwh(tlbr), s, c, f)
-                     for (tlbr, s, c, f) in zip(dets, scores_keep, classes_keep, features_keep)
-                 ]
+                 detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s, c, f) for
+                               (tlbr, s, c, f) in zip(dets, scores_keep, classes_keep, features_keep)]
              else:
-                 detections = [
-                     STrack(STrack.tlbr_to_tlwh(tlbr), s, c)
-                     for (tlbr, s, c) in zip(dets, scores_keep, classes_keep)
-                 ]
+                 detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s, c) for
+                               (tlbr, s, c) in zip(dets, scores_keep, classes_keep)]
          else:
              detections = []

-         """ Add newly detected tracklets to tracked_stracks"""
+         ''' Add newly detected tracklets to tracked_stracks'''
          unconfirmed = []
-         tracked_stracks = []  # type: list[STrack]
+         tracked_stracks = []
          for track in self.tracked_stracks:
              if not track.is_activated:
                  unconfirmed.append(track)
              else:
                  tracked_stracks.append(track)

-         """ Step 2: First association, with high score detection boxes"""
+         ''' Step 2: First association, with high score detection boxes'''
          strack_pool = joint_stracks(tracked_stracks, self.lost_stracks)

-         # Predict the current location with KF
          STrack.multi_predict(strack_pool)
-
-         # Fix camera motion
          warp = self.gmc.apply(img, dets)
          STrack.multi_gmc(strack_pool, warp)
          STrack.multi_gmc(unconfirmed, warp)

-         # Associate with high score detection boxes
          ious_dists = matching.iou_distance(strack_pool, detections)
-         ious_dists_mask = ious_dists > self.proximity_thresh
+         ious_dists_mask = (ious_dists > self.proximity_thresh)

          if not self.args.mot20:
              ious_dists = matching.fuse_score(ious_dists, detections)

          if self.args.with_reid:
              emb_dists = matching.embedding_distance(strack_pool, detections) / 2.0
-             raw_emb_dists = emb_dists.copy()
              emb_dists[emb_dists > self.appearance_thresh] = 1.0
              emb_dists[ious_dists_mask] = 1.0
              dists = np.minimum(ious_dists, emb_dists)
-
-             # Popular ReID method (JDE / FairMOT)
-             # raw_emb_dists = matching.embedding_distance(strack_pool, detections)
-             # dists = matching.fuse_motion(self.kalman_filter, raw_emb_dists, strack_pool, detections)
-             # emb_dists = dists
-
-             # IoU making ReID
-             # dists = matching.embedding_distance(strack_pool, detections)
-             # dists[ious_dists_mask] = 1.0
          else:
              dists = ious_dists

-         matches, u_track, u_detection = matching.linear_assignment(
-             dists, thresh=self.args.match_thresh
-         )
+         matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.args.match_thresh)

          for itracked, idet in matches:
              track = strack_pool[itracked]
              det = detections[idet]
+             orig_idx = int(high_inds_map[idet])  # Get original detection index
+             # Fill track_id for matched detection
+             detection_track_map[orig_idx]["track_id"] = int(track.track_id)
              if track.state == TrackState.Tracked:
-                 track.update(detections[idet], self.frame_id)
+                 track.update(det, self.frame_id)
                  activated_starcks.append(track)
              else:
                  track.re_activate(det, self.frame_id, new_id=False)
                  refind_stracks.append(track)

-         """ Step 3: Second association, with low score detection boxes"""
+         ''' Step 3: Second association, with low score detection boxes'''
          if len(scores):
              inds_high = scores < self.args.track_high_thresh
              inds_low = scores > self.args.track_low_thresh
@@ -380,34 +365,32 @@ class BoTSORT(object):
              dets_second = bboxes[inds_second]
              scores_second = scores[inds_second]
              classes_second = classes[inds_second]
+             second_inds_map = low_inds_map[inds_second]
          else:
              dets_second = []
              scores_second = []
              classes_second = []
+             second_inds_map = []

-         # association the untrack to the low score detections
          if len(dets_second) > 0:
-             """Detections"""
-             detections_second = [
-                 STrack(STrack.tlbr_to_tlwh(tlbr), s, c)
-                 for (tlbr, s, c) in zip(dets_second, scores_second, classes_second)
-             ]
+             detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s, c) for
+                                  (tlbr, s, c) in zip(dets_second, scores_second, classes_second)]
          else:
              detections_second = []

-         r_tracked_stracks = [
-             strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked
-         ]
+         r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked]
          dists = matching.iou_distance(r_tracked_stracks, detections_second)
          matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5)
          for itracked, idet in matches:
              track = r_tracked_stracks[itracked]
-             det = detections_second[idet]
+             orig_idx = int(second_inds_map[idet])
+             # Fill track_id for matched second-level detection
+             detection_track_map[orig_idx]["track_id"] = int(track.track_id)
              if track.state == TrackState.Tracked:
-                 track.update(det, self.frame_id)
+                 track.update(detections_second[idet], self.frame_id)
                  activated_starcks.append(track)
              else:
-                 track.re_activate(det, self.frame_id, new_id=False)
+                 track.re_activate(detections_second[idet], self.frame_id, new_id=False)
                  refind_stracks.append(track)

          for it in u_track:
@@ -416,14 +399,20 @@ class BoTSORT(object):
                  track.mark_lost()
                  lost_stracks.append(track)

-         """Deal with unconfirmed tracks, usually tracks with only one beginning frame"""
-         detections = [detections[i] for i in u_detection]
-         dists = matching.iou_distance(unconfirmed, detections)
+         '''Deal with unconfirmed tracks'''
+         detections_unconf = [detections[i] for i in u_detection]
+         dists = matching.iou_distance(unconfirmed, detections_unconf)
          if not self.args.mot20:
-             dists = matching.fuse_score(dists, detections)
-         matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7)
+             dists = matching.fuse_score(dists, detections_unconf)
+         matches, u_unconfirmed, u_detection_unconf = matching.linear_assignment(dists, thresh=0.7)
          for itracked, idet in matches:
-             unconfirmed[itracked].update(detections[idet], self.frame_id)
+             track = unconfirmed[itracked]
+             # Get original detection index from u_detection array
+             detection_idx = u_detection[idet]
+             orig_idx = int(high_inds_map[detection_idx])
+             # Fill track_id for matched unconfirmed detection
+             detection_track_map[orig_idx]["track_id"] = int(track.track_id)
+             unconfirmed[itracked].update(detections_unconf[idet], self.frame_id)
              activated_starcks.append(unconfirmed[itracked])
          for it in u_unconfirmed:
              track = unconfirmed[it]
@@ -433,9 +422,11 @@ class BoTSORT(object):
          """ Step 4: Init new stracks"""
          for inew in u_detection:
              track = detections[inew]
+             orig_idx = int(high_inds_map[inew])
              if track.score < self.new_track_thresh:
                  continue
-
+             # Fill track_id for new detection
+             detection_track_map[orig_idx]["track_id"] = int(track.track_id)
              track.activate(self.kalman_filter, self.frame_id)
              activated_starcks.append(track)

@@ -453,15 +444,11 @@ class BoTSORT(object):
          self.lost_stracks.extend(lost_stracks)
          self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks)
          self.removed_stracks.extend(removed_stracks)
-         self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(
-             self.tracked_stracks, self.lost_stracks
-         )
-
-         # output_stracks = [track for track in self.tracked_stracks if track.is_activated]
-         output_stracks = [track for track in self.tracked_stracks]
+         self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks)

-         return output_stracks
+         output_stracks = [track for track in self.tracked_stracks if track.is_activated]

+         return output_stracks, detection_track_map


  def joint_stracks(tlista, tlistb):