supervisely 6.73.418-py3-none-any.whl → 6.73.420-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. supervisely/api/entity_annotation/figure_api.py +89 -45
  2. supervisely/nn/inference/inference.py +61 -45
  3. supervisely/nn/inference/instance_segmentation/instance_segmentation.py +1 -0
  4. supervisely/nn/inference/object_detection/object_detection.py +1 -0
  5. supervisely/nn/inference/session.py +4 -4
  6. supervisely/nn/model/model_api.py +31 -20
  7. supervisely/nn/model/prediction.py +11 -0
  8. supervisely/nn/model/prediction_session.py +33 -6
  9. supervisely/nn/tracker/__init__.py +1 -2
  10. supervisely/nn/tracker/base_tracker.py +44 -0
  11. supervisely/nn/tracker/botsort/__init__.py +1 -0
  12. supervisely/nn/tracker/botsort/botsort_config.yaml +31 -0
  13. supervisely/nn/tracker/botsort/osnet_reid/osnet.py +566 -0
  14. supervisely/nn/tracker/botsort/osnet_reid/osnet_reid_interface.py +88 -0
  15. supervisely/nn/tracker/botsort/tracker/__init__.py +0 -0
  16. supervisely/nn/tracker/{bot_sort → botsort/tracker}/basetrack.py +1 -2
  17. supervisely/nn/tracker/{utils → botsort/tracker}/gmc.py +51 -59
  18. supervisely/nn/tracker/{deep_sort/deep_sort → botsort/tracker}/kalman_filter.py +71 -33
  19. supervisely/nn/tracker/botsort/tracker/matching.py +202 -0
  20. supervisely/nn/tracker/{bot_sort/bot_sort.py → botsort/tracker/mc_bot_sort.py} +68 -81
  21. supervisely/nn/tracker/botsort_tracker.py +259 -0
  22. supervisely/project/project.py +212 -74
  23. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/METADATA +3 -1
  24. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/RECORD +29 -42
  25. supervisely/nn/tracker/bot_sort/__init__.py +0 -21
  26. supervisely/nn/tracker/bot_sort/fast_reid_interface.py +0 -152
  27. supervisely/nn/tracker/bot_sort/matching.py +0 -127
  28. supervisely/nn/tracker/bot_sort/sly_tracker.py +0 -401
  29. supervisely/nn/tracker/deep_sort/__init__.py +0 -6
  30. supervisely/nn/tracker/deep_sort/deep_sort/__init__.py +0 -1
  31. supervisely/nn/tracker/deep_sort/deep_sort/detection.py +0 -49
  32. supervisely/nn/tracker/deep_sort/deep_sort/iou_matching.py +0 -81
  33. supervisely/nn/tracker/deep_sort/deep_sort/linear_assignment.py +0 -202
  34. supervisely/nn/tracker/deep_sort/deep_sort/nn_matching.py +0 -176
  35. supervisely/nn/tracker/deep_sort/deep_sort/track.py +0 -166
  36. supervisely/nn/tracker/deep_sort/deep_sort/tracker.py +0 -145
  37. supervisely/nn/tracker/deep_sort/deep_sort.py +0 -301
  38. supervisely/nn/tracker/deep_sort/generate_clip_detections.py +0 -90
  39. supervisely/nn/tracker/deep_sort/preprocessing.py +0 -70
  40. supervisely/nn/tracker/deep_sort/sly_tracker.py +0 -273
  41. supervisely/nn/tracker/tracker.py +0 -285
  42. supervisely/nn/tracker/utils/kalman_filter.py +0 -492
  43. supervisely/nn/tracking/__init__.py +0 -1
  44. supervisely/nn/tracking/boxmot.py +0 -114
  45. supervisely/nn/tracking/tracking.py +0 -24
  46. /supervisely/nn/tracker/{utils → botsort/osnet_reid}/__init__.py +0 -0
  47. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/LICENSE +0 -0
  48. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/WHEEL +0 -0
  49. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/entry_points.txt +0 -0
  50. {supervisely-6.73.418.dist-info → supervisely-6.73.420.dist-info}/top_level.txt +0 -0
supervisely/nn/tracker/deep_sort/deep_sort/linear_assignment.py
@@ -1,202 +0,0 @@
- # vim: expandtab:ts=4:sw=4
- from __future__ import absolute_import
-
- import numpy as np
-
- # pylint: disable=import-error
- from scipy.optimize import linear_sum_assignment
-
- from supervisely.nn.tracker.deep_sort.deep_sort import kalman_filter
-
- INFTY_COST = 1e5
-
-
- def min_cost_matching(
-     distance_metric, max_distance, tracks, detections, track_indices=None, detection_indices=None
- ):
-     """Solve linear assignment problem.
-
-     Parameters
-     ----------
-     distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
-         The distance metric is given a list of tracks and detections as well as
-         a list of N track indices and M detection indices. The metric should
-         return the NxM dimensional cost matrix, where element (i, j) is the
-         association cost between the i-th track in the given track indices and
-         the j-th detection in the given detection_indices.
-     max_distance : float
-         Gating threshold. Associations with cost larger than this value are
-         disregarded.
-     tracks : List[track.Track]
-         A list of predicted tracks at the current time step.
-     detections : List[detection.Detection]
-         A list of detections at the current time step.
-     track_indices : List[int]
-         List of track indices that maps rows in `cost_matrix` to tracks in
-         `tracks` (see description above).
-     detection_indices : List[int]
-         List of detection indices that maps columns in `cost_matrix` to
-         detections in `detections` (see description above).
-
-     Returns
-     -------
-     (List[(int, int)], List[int], List[int])
-         Returns a tuple with the following three entries:
-         * A list of matched track and detection indices.
-         * A list of unmatched track indices.
-         * A list of unmatched detection indices.
-
-     """
-     if track_indices is None:
-         track_indices = np.arange(len(tracks))
-     if detection_indices is None:
-         detection_indices = np.arange(len(detections))
-
-     if len(detection_indices) == 0 or len(track_indices) == 0:
-         return [], track_indices, detection_indices  # Nothing to match.
-
-     cost_matrix = distance_metric(tracks, detections, track_indices, detection_indices)
-     cost_matrix[cost_matrix > max_distance] = max_distance + 1e-5
-     indices = linear_sum_assignment(cost_matrix)
-     indices = np.asarray(indices)
-     indices = np.transpose(indices)
-     matches, unmatched_tracks, unmatched_detections = [], [], []
-     for col, detection_idx in enumerate(detection_indices):
-         if col not in indices[:, 1]:
-             unmatched_detections.append(detection_idx)
-     for row, track_idx in enumerate(track_indices):
-         if row not in indices[:, 0]:
-             unmatched_tracks.append(track_idx)
-     for row, col in indices:
-         track_idx = track_indices[row]
-         detection_idx = detection_indices[col]
-         if cost_matrix[row, col] > max_distance:
-             unmatched_tracks.append(track_idx)
-             unmatched_detections.append(detection_idx)
-         else:
-             matches.append((track_idx, detection_idx))
-     return matches, unmatched_tracks, unmatched_detections
-
-
- def matching_cascade(
-     distance_metric,
-     max_distance,
-     cascade_depth,
-     tracks,
-     detections,
-     track_indices=None,
-     detection_indices=None,
- ):
-     """Run matching cascade.
-
-     Parameters
-     ----------
-     distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray
-         The distance metric is given a list of tracks and detections as well as
-         a list of N track indices and M detection indices. The metric should
-         return the NxM dimensional cost matrix, where element (i, j) is the
-         association cost between the i-th track in the given track indices and
-         the j-th detection in the given detection indices.
-     max_distance : float
-         Gating threshold. Associations with cost larger than this value are
-         disregarded.
-     cascade_depth: int
-         The cascade depth, should be se to the maximum track age.
-     tracks : List[track.Track]
-         A list of predicted tracks at the current time step.
-     detections : List[detection.Detection]
-         A list of detections at the current time step.
-     track_indices : Optional[List[int]]
-         List of track indices that maps rows in `cost_matrix` to tracks in
-         `tracks` (see description above). Defaults to all tracks.
-     detection_indices : Optional[List[int]]
-         List of detection indices that maps columns in `cost_matrix` to
-         detections in `detections` (see description above). Defaults to all
-         detections.
-
-     Returns
-     -------
-     (List[(int, int)], List[int], List[int])
-         Returns a tuple with the following three entries:
-         * A list of matched track and detection indices.
-         * A list of unmatched track indices.
-         * A list of unmatched detection indices.
-
-     """
-     if track_indices is None:
-         track_indices = list(range(len(tracks)))
-     if detection_indices is None:
-         detection_indices = list(range(len(detections)))
-
-     unmatched_detections = detection_indices
-     matches = []
-     for level in range(cascade_depth):
-         if len(unmatched_detections) == 0:  # No detections left
-             break
-
-         track_indices_l = [k for k in track_indices if tracks[k].time_since_update == 1 + level]
-         if len(track_indices_l) == 0:  # Nothing to match at this level
-             continue
-
-         matches_l, _, unmatched_detections = min_cost_matching(
-             distance_metric, max_distance, tracks, detections, track_indices_l, unmatched_detections
-         )
-         matches += matches_l
-     unmatched_tracks = list(set(track_indices) - set(k for k, _ in matches))
-     return matches, unmatched_tracks, unmatched_detections
-
-
- def gate_cost_matrix(
-     kf,
-     cost_matrix,
-     tracks,
-     detections,
-     track_indices,
-     detection_indices,
-     gated_cost=INFTY_COST,
-     only_position=False,
- ):
-     """Invalidate infeasible entries in cost matrix based on the state
-     distributions obtained by Kalman filtering.
-
-     Parameters
-     ----------
-     kf : The Kalman filter.
-     cost_matrix : ndarray
-         The NxM dimensional cost matrix, where N is the number of track indices
-         and M is the number of detection indices, such that entry (i, j) is the
-         association cost between `tracks[track_indices[i]]` and
-         `detections[detection_indices[j]]`.
-     tracks : List[track.Track]
-         A list of predicted tracks at the current time step.
-     detections : List[detection.Detection]
-         A list of detections at the current time step.
-     track_indices : List[int]
-         List of track indices that maps rows in `cost_matrix` to tracks in
-         `tracks` (see description above).
-     detection_indices : List[int]
-         List of detection indices that maps columns in `cost_matrix` to
-         detections in `detections` (see description above).
-     gated_cost : Optional[float]
-         Entries in the cost matrix corresponding to infeasible associations are
-         set this value. Defaults to a very large value.
-     only_position : Optional[bool]
-         If True, only the x, y position of the state distribution is considered
-         during gating. Defaults to False.
-
-     Returns
-     -------
-     ndarray
-         Returns the modified cost matrix.
-
-     """
-     gating_dim = 2 if only_position else 4
-     gating_threshold = kalman_filter.chi2inv95[gating_dim]
-     measurements = np.asarray([detections[i].to_xyah() for i in detection_indices])
-     for row, track_idx in enumerate(track_indices):
-         track = tracks[track_idx]
-         gating_distance = kf.gating_distance(
-             track.mean, track.covariance, measurements, only_position
-         )
-         cost_matrix[row, gating_distance > gating_threshold] = gated_cost
-     return cost_matrix
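The deleted `min_cost_matching` above is essentially a thin wrapper around SciPy's Hungarian solver plus a gating step that discards assignments whose cost exceeds the threshold. A minimal standalone sketch of the same idea, using only NumPy and SciPy; the cost values and threshold below are made up for illustration:

import numpy as np
from scipy.optimize import linear_sum_assignment

# Toy cost matrix: 3 tracks (rows) x 2 detections (columns); values are illustrative.
cost = np.array([
    [0.2, 0.9],
    [0.8, 0.1],
    [0.7, 0.6],
])
max_distance = 0.5  # gating threshold

# Clamp over-threshold costs so the solver never prefers an infeasible pair.
gated = np.where(cost > max_distance, max_distance + 1e-5, cost)
rows, cols = linear_sum_assignment(gated)

# Keep only assignments that respect the gate, mirroring the removed logic.
matches = [(r, c) for r, c in zip(rows, cols) if cost[r, c] <= max_distance]
matched_rows = {r for r, _ in matches}
matched_cols = {c for _, c in matches}
unmatched_tracks = [r for r in range(cost.shape[0]) if r not in matched_rows]
unmatched_detections = [c for c in range(cost.shape[1]) if c not in matched_cols]

print(matches)               # [(0, 0), (1, 1)]
print(unmatched_tracks)      # [2]
print(unmatched_detections)  # []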
supervisely/nn/tracker/deep_sort/deep_sort/nn_matching.py
@@ -1,176 +0,0 @@
- # vim: expandtab:ts=4:sw=4
- import numpy as np
-
-
- def _pdist(a, b):
-     """Compute pair-wise squared distance between points in `a` and `b`.
-
-     Parameters
-     ----------
-     a : array_like
-         An NxM matrix of N samples of dimensionality M.
-     b : array_like
-         An LxM matrix of L samples of dimensionality M.
-
-     Returns
-     -------
-     ndarray
-         Returns a matrix of size len(a), len(b) such that eleement (i, j)
-         contains the squared distance between `a[i]` and `b[j]`.
-
-     """
-     a, b = np.asarray(a), np.asarray(b)
-     if len(a) == 0 or len(b) == 0:
-         return np.zeros((len(a), len(b)))
-     a2, b2 = np.square(a).sum(axis=1), np.square(b).sum(axis=1)
-     r2 = -2. * np.dot(a, b.T) + a2[:, None] + b2[None, :]
-     r2 = np.clip(r2, 0., float(np.inf))
-     return r2
-
-
- def _cosine_distance(a, b, data_is_normalized=False):
-     """Compute pair-wise cosine distance between points in `a` and `b`.
-
-     Parameters
-     ----------
-     a : array_like
-         An NxM matrix of N samples of dimensionality M.
-     b : array_like
-         An LxM matrix of L samples of dimensionality M.
-     data_is_normalized : Optional[bool]
-         If True, assumes rows in a and b are unit length vectors.
-         Otherwise, a and b are explicitly normalized to lenght 1.
-
-     Returns
-     -------
-     ndarray
-         Returns a matrix of size len(a), len(b) such that eleement (i, j)
-         contains the squared distance between `a[i]` and `b[j]`.
-
-     """
-     if not data_is_normalized:
-         a = np.asarray(a) / np.linalg.norm(a, axis=1, keepdims=True)
-         b = np.asarray(b) / np.linalg.norm(b, axis=1, keepdims=True)
-     return 1. - np.dot(a, b.T)
-
-
- def _nn_euclidean_distance(x, y):
-     """ Helper function for nearest neighbor distance metric (Euclidean).
-
-     Parameters
-     ----------
-     x : ndarray
-         A matrix of N row-vectors (sample points).
-     y : ndarray
-         A matrix of M row-vectors (query points).
-
-     Returns
-     -------
-     ndarray
-         A vector of length M that contains for each entry in `y` the
-         smallest Euclidean distance to a sample in `x`.
-
-     """
-     distances = _pdist(x, y)
-     return np.maximum(0.0, distances.min(axis=0))
-
-
- def _nn_cosine_distance(x, y):
-     """ Helper function for nearest neighbor distance metric (cosine).
-
-     Parameters
-     ----------
-     x : ndarray
-         A matrix of N row-vectors (sample points).
-     y : ndarray
-         A matrix of M row-vectors (query points).
-
-     Returns
-     -------
-     ndarray
-         A vector of length M that contains for each entry in `y` the
-         smallest cosine distance to a sample in `x`.
-
-     """
-     distances = _cosine_distance(x, y)
-     return distances.min(axis=0)
-
-
- class NearestNeighborDistanceMetric(object):
-     """
-     A nearest neighbor distance metric that, for each target, returns
-     the closest distance to any sample that has been observed so far.
-
-     Parameters
-     ----------
-     metric : str
-         Either "euclidean" or "cosine".
-     matching_threshold: float
-         The matching threshold. Samples with larger distance are considered an
-         invalid match.
-     budget : Optional[int]
-         If not None, fix samples per class to at most this number. Removes
-         the oldest samples when the budget is reached.
-
-     Attributes
-     ----------
-     samples : Dict[int -> List[ndarray]]
-         A dictionary that maps from target identities to the list of samples
-         that have been observed so far.
-
-     """
-
-     def __init__(self, metric, matching_threshold, budget=None):
-
-         if metric == "euclidean":
-             self._metric = _nn_euclidean_distance
-         elif metric == "cosine":
-             self._metric = _nn_cosine_distance
-         else:
-             raise ValueError(
-                 "Invalid metric; must be either 'euclidean' or 'cosine'")
-         self.matching_threshold = matching_threshold
-         self.budget = budget
-         self.samples = {}
-
-     def partial_fit(self, features, targets, active_targets):
-         """Update the distance metric with new data.
-
-         Parameters
-         ----------
-         features : ndarray
-             An NxM matrix of N features of dimensionality M.
-         targets : ndarray
-             An integer array of associated target identities.
-         active_targets : List[int]
-             A list of targets that are currently present in the scene.
-
-         """
-         for feature, target in zip(features, targets):
-             self.samples.setdefault(target, []).append(feature)
-             if self.budget is not None:
-                 self.samples[target] = self.samples[target][-self.budget:]
-         self.samples = {k: self.samples[k] for k in active_targets}
-
-     def distance(self, features, targets):
-         """Compute distance between features and targets.
-
-         Parameters
-         ----------
-         features : ndarray
-             An NxM matrix of N features of dimensionality M.
-         targets : List[int]
-             A list of targets to match the given `features` against.
-
-         Returns
-         -------
-         ndarray
-             Returns a cost matrix of shape len(targets), len(features), where
-             element (i, j) contains the closest squared distance between
-             `targets[i]` and `features[j]`.
-
-         """
-         cost_matrix = np.zeros((len(targets), len(features)))
-         for i, target in enumerate(targets):
-             cost_matrix[i, :] = self._metric(self.samples[target], features)
-         return cost_matrix
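The deleted `NearestNeighborDistanceMetric` keeps a gallery of appearance features per track and scores a new detection by its distance to the closest stored sample. A minimal NumPy sketch of that reduction for the cosine case; the feature vectors here are random placeholders, not real embeddings:

import numpy as np

def cosine_distance(a, b):
    # Pairwise cosine distance between row vectors in a (NxM) and b (LxM).
    a = np.asarray(a) / np.linalg.norm(a, axis=1, keepdims=True)
    b = np.asarray(b) / np.linalg.norm(b, axis=1, keepdims=True)
    return 1.0 - a @ b.T

# Toy gallery of 3 stored appearance features and 2 query detections (4-D, made up).
rng = np.random.default_rng(0)
gallery = rng.normal(size=(3, 4))
queries = rng.normal(size=(2, 4))

# For each query, distance to its nearest gallery sample -- the per-track
# reduction the removed metric performs when building its cost matrix.
dist = cosine_distance(gallery, queries)  # shape (3, 2)
nearest = dist.min(axis=0)                # shape (2,)
print(nearest)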
supervisely/nn/tracker/deep_sort/deep_sort/track.py
@@ -1,166 +0,0 @@
- # vim: expandtab:ts=4:sw=4
-
-
- class TrackState:
-     """
-     Enumeration type for the single target track state. Newly created tracks are
-     classified as `tentative` until enough evidence has been collected. Then,
-     the track state is changed to `confirmed`. Tracks that are no longer alive
-     are classified as `deleted` to mark them for removal from the set of active
-     tracks.
-
-     """
-
-     Tentative = 1
-     Confirmed = 2
-     Deleted = 3
-
-
- class Track:
-     """
-     A single target track with state space `(x, y, a, h)` and associated
-     velocities, where `(x, y)` is the center of the bounding box, `a` is the
-     aspect ratio and `h` is the height.
-
-     Parameters
-     ----------
-     mean : ndarray
-         Mean vector of the initial state distribution.
-     covariance : ndarray
-         Covariance matrix of the initial state distribution.
-     track_id : int
-         A unique track identifier.
-     n_init : int
-         Number of consecutive detections before the track is confirmed. The
-         track state is set to `Deleted` if a miss occurs within the first
-         `n_init` frames.
-     max_age : int
-         The maximum number of consecutive misses before the track state is
-         set to `Deleted`.
-     feature : Optional[ndarray]
-         Feature vector of the detection this track originates from. If not None,
-         this feature is added to the `features` cache.
-
-     Attributes
-     ----------
-     mean : ndarray
-         Mean vector of the initial state distribution.
-     covariance : ndarray
-         Covariance matrix of the initial state distribution.
-     track_id : int
-         A unique track identifier.
-     hits : int
-         Total number of measurement updates.
-     age : int
-         Total number of frames since first occurance.
-     time_since_update : int
-         Total number of frames since last measurement update.
-     state : TrackState
-         The current track state.
-     features : List[ndarray]
-         A cache of features. On each measurement update, the associated feature
-         vector is added to this list.
-
-     """
-
-     def __init__(self, mean, covariance, track_id, n_init, max_age,
-                  feature=None):
-         self.mean = mean
-         self.covariance = covariance
-         self.track_id = track_id
-         self.hits = 1
-         self.age = 1
-         self.time_since_update = 0
-
-         self.state = TrackState.Tentative
-         self.features = []
-         if feature is not None:
-             self.features.append(feature)
-
-         self._n_init = n_init
-         self._max_age = max_age
-
-     def to_tlwh(self):
-         """Get current position in bounding box format `(top left x, top left y,
-         width, height)`.
-
-         Returns
-         -------
-         ndarray
-             The bounding box.
-
-         """
-         ret = self.mean[:4].copy()
-         ret[2] *= ret[3]
-         ret[:2] -= ret[2:] / 2
-         return ret
-
-     def to_tlbr(self):
-         """Get current position in bounding box format `(min x, miny, max x,
-         max y)`.
-
-         Returns
-         -------
-         ndarray
-             The bounding box.
-
-         """
-         ret = self.to_tlwh()
-         ret[2:] = ret[:2] + ret[2:]
-         return ret
-
-     def predict(self, kf):
-         """Propagate the state distribution to the current time step using a
-         Kalman filter prediction step.
-
-         Parameters
-         ----------
-         kf : kalman_filter.KalmanFilter
-             The Kalman filter.
-
-         """
-         self.mean, self.covariance = kf.predict(self.mean, self.covariance)
-         self.age += 1
-         self.time_since_update += 1
-
-     def update(self, kf, detection):
-         """Perform Kalman filter measurement update step and update the feature
-         cache.
-
-         Parameters
-         ----------
-         kf : kalman_filter.KalmanFilter
-             The Kalman filter.
-         detection : Detection
-             The associated detection.
-
-         """
-         self.mean, self.covariance = kf.update(
-             self.mean, self.covariance, detection.to_xyah())
-         self.features.append(detection.feature)
-
-         self.hits += 1
-         self.time_since_update = 0
-         if self.state == TrackState.Tentative and self.hits >= self._n_init:
-             self.state = TrackState.Confirmed
-
-     def mark_missed(self):
-         """Mark this track as missed (no association at the current time step).
-         """
-         if self.state == TrackState.Tentative:
-             self.state = TrackState.Deleted
-         elif self.time_since_update > self._max_age:
-             self.state = TrackState.Deleted
-
-     def is_tentative(self):
-         """Returns True if this track is tentative (unconfirmed).
-         """
-         return self.state == TrackState.Tentative
-
-     def is_confirmed(self):
-         """Returns True if this track is confirmed."""
-         return self.state == TrackState.Confirmed
-
-     def is_deleted(self):
-         """Returns True if this track is dead and should be deleted."""
-         return self.state == TrackState.Deleted
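The deleted `Track` stores its state in `(x, y, a, h)` form (box center, aspect ratio, height); `to_tlwh` and `to_tlbr` are pure arithmetic conversions. A short worked sketch of those conversions on a made-up state vector:

import numpy as np

# Made-up state mean in (x_center, y_center, aspect_ratio, height) format,
# the representation the removed Track class keeps in self.mean[:4].
xyah = np.array([320.0, 240.0, 0.5, 100.0])

# to_tlwh: width = aspect_ratio * height, then shift the center to the top-left corner.
tlwh = xyah.copy()
tlwh[2] *= tlwh[3]        # width = a * h -> 50
tlwh[:2] -= tlwh[2:] / 2  # (x, y) -> top-left corner

# to_tlbr: bottom-right corner = top-left + (width, height).
tlbr = tlwh.copy()
tlbr[2:] = tlbr[:2] + tlbr[2:]

print(tlwh)  # [295. 190.  50. 100.]
print(tlbr)  # [295. 190. 345. 290.]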
supervisely/nn/tracker/deep_sort/deep_sort/tracker.py
@@ -1,145 +0,0 @@
- # vim: expandtab:ts=4:sw=4
- from __future__ import absolute_import
-
- import numpy as np
-
- from supervisely.nn.tracker.deep_sort.deep_sort import (
-     iou_matching,
-     kalman_filter,
-     linear_assignment,
- )
- from supervisely.nn.tracker.deep_sort.deep_sort.track import Track
-
-
- class Tracker:
-     """
-     This is the multi-target tracker.
-
-     Parameters
-     ----------
-     metric : nn_matching.NearestNeighborDistanceMetric
-         A distance metric for measurement-to-track association.
-     max_age : int
-         Maximum number of missed misses before a track is deleted.
-     n_init : int
-         Number of consecutive detections before the track is confirmed. The
-         track state is set to `Deleted` if a miss occurs within the first
-         `n_init` frames.
-
-     Attributes
-     ----------
-     metric : nn_matching.NearestNeighborDistanceMetric
-         The distance metric used for measurement to track association.
-     max_age : int
-         Maximum number of missed misses before a track is deleted.
-     n_init : int
-         Number of frames that a track remains in initialization phase.
-     kf : kalman_filter.KalmanFilter
-         A Kalman filter to filter target trajectories in image space.
-     tracks : List[Track]
-         The list of active tracks at the current time step.
-
-     """
-
-     def __init__(self, metric, max_iou_distance=0.7, max_age=30, n_init=3):
-         self.metric = metric
-         self.max_iou_distance = max_iou_distance
-         self.max_age = max_age
-         self.n_init = n_init
-
-         self.kf = kalman_filter.KalmanFilter()
-         self.tracks = []
-         self._next_id = 1
-
-     def predict(self):
-         """Propagate track state distributions one time step forward.
-
-         This function should be called once every time step, before `update`.
-         """
-         for track in self.tracks:
-             track.predict(self.kf)
-
-     def update(self, detections):
-         """Perform measurement update and track management.
-
-         Parameters
-         ----------
-         detections : List[deep_sort.detection.Detection]
-             A list of detections at the current time step.
-
-         """
-         # Run matching cascade.
-         matches, unmatched_tracks, unmatched_detections = self._match(detections)
-
-         # Update track set.
-         for track_idx, detection_idx in matches:
-             self.tracks[track_idx].update(self.kf, detections[detection_idx])
-         for track_idx in unmatched_tracks:
-             self.tracks[track_idx].mark_missed()
-         for detection_idx in unmatched_detections:
-             self._initiate_track(detections[detection_idx])
-         self.tracks = [t for t in self.tracks if not t.is_deleted()]
-
-         # Update distance metric.
-         active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
-         features, targets = [], []
-         for track in self.tracks:
-             if not track.is_confirmed():
-                 continue
-             features += track.features
-             targets += [track.track_id for _ in track.features]
-             track.features = []
-         self.metric.partial_fit(np.asarray(features), np.asarray(targets), active_targets)
-
-     def _match(self, detections):
-
-         def gated_metric(tracks, dets, track_indices, detection_indices):
-             features = np.array([dets[i].feature for i in detection_indices])
-             targets = np.array([tracks[i].track_id for i in track_indices])
-             cost_matrix = self.metric.distance(features, targets)
-             cost_matrix = linear_assignment.gate_cost_matrix(
-                 self.kf, cost_matrix, tracks, dets, track_indices, detection_indices
-             )
-
-             return cost_matrix
-
-         # Split track set into confirmed and unconfirmed tracks.
-         confirmed_tracks = [i for i, t in enumerate(self.tracks) if t.is_confirmed()]
-         unconfirmed_tracks = [i for i, t in enumerate(self.tracks) if not t.is_confirmed()]
-
-         # Associate confirmed tracks using appearance features.
-         matches_a, unmatched_tracks_a, unmatched_detections = linear_assignment.matching_cascade(
-             gated_metric,
-             self.metric.matching_threshold,
-             self.max_age,
-             self.tracks,
-             detections,
-             confirmed_tracks,
-         )
-
-         # Associate remaining tracks together with unconfirmed tracks using IOU.
-         iou_track_candidates = unconfirmed_tracks + [
-             k for k in unmatched_tracks_a if self.tracks[k].time_since_update == 1
-         ]
-         unmatched_tracks_a = [
-             k for k in unmatched_tracks_a if self.tracks[k].time_since_update != 1
-         ]
-         matches_b, unmatched_tracks_b, unmatched_detections = linear_assignment.min_cost_matching(
-             iou_matching.iou_cost,
-             self.max_iou_distance,
-             self.tracks,
-             detections,
-             iou_track_candidates,
-             unmatched_detections,
-         )
-
-         matches = matches_a + matches_b
-         unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b))
-         return matches, unmatched_tracks, unmatched_detections
-
-     def _initiate_track(self, detection):
-         mean, covariance = self.kf.initiate(detection.to_xyah())
-         self.tracks.append(
-             Track(mean, covariance, self._next_id, self.n_init, self.max_age, detection.feature)
-         )
-         self._next_id += 1
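The deleted `Tracker` expected a strict per-frame protocol: call `predict()` first, then `update()` with the frame's detections, after which confirmed tracks can be read from `tracker.tracks`. A minimal driver sketch of how this was wired before removal; it assumes supervisely 6.73.418 (where these modules still exist), and `detections_per_frame` is a placeholder for per-frame lists of `Detection` objects, not an actual supervisely API:

from supervisely.nn.tracker.deep_sort.deep_sort import nn_matching
from supervisely.nn.tracker.deep_sort.deep_sort.tracker import Tracker

# Appearance metric with a cosine threshold and a bounded per-track feature gallery.
metric = nn_matching.NearestNeighborDistanceMetric("cosine", matching_threshold=0.2, budget=100)
tracker = Tracker(metric, max_iou_distance=0.7, max_age=30, n_init=3)

detections_per_frame = []  # placeholder: one list of Detection objects per video frame

for detections in detections_per_frame:
    tracker.predict()           # advance every track one step (Kalman prediction)
    tracker.update(detections)  # associate, update matched tracks, spawn new ones, prune dead ones
    confirmed = [t for t in tracker.tracks if t.is_confirmed()]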