mapillary-tools 0.13.3a1__py3-none-any.whl → 0.14.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapillary_tools/__init__.py +1 -1
- mapillary_tools/api_v4.py +235 -14
- mapillary_tools/authenticate.py +325 -64
- mapillary_tools/{geotag/blackvue_parser.py → blackvue_parser.py} +74 -54
- mapillary_tools/camm/camm_builder.py +55 -97
- mapillary_tools/camm/camm_parser.py +425 -177
- mapillary_tools/commands/__main__.py +11 -4
- mapillary_tools/commands/authenticate.py +8 -1
- mapillary_tools/commands/process.py +27 -51
- mapillary_tools/commands/process_and_upload.py +19 -5
- mapillary_tools/commands/sample_video.py +2 -3
- mapillary_tools/commands/upload.py +18 -9
- mapillary_tools/commands/video_process_and_upload.py +19 -5
- mapillary_tools/config.py +28 -12
- mapillary_tools/constants.py +46 -4
- mapillary_tools/exceptions.py +34 -35
- mapillary_tools/exif_read.py +158 -53
- mapillary_tools/exiftool_read.py +19 -5
- mapillary_tools/exiftool_read_video.py +12 -1
- mapillary_tools/exiftool_runner.py +77 -0
- mapillary_tools/geo.py +148 -107
- mapillary_tools/geotag/factory.py +298 -0
- mapillary_tools/geotag/geotag_from_generic.py +152 -11
- mapillary_tools/geotag/geotag_images_from_exif.py +43 -124
- mapillary_tools/geotag/geotag_images_from_exiftool.py +66 -70
- mapillary_tools/geotag/geotag_images_from_exiftool_both_image_and_video.py +32 -48
- mapillary_tools/geotag/geotag_images_from_gpx.py +41 -116
- mapillary_tools/geotag/geotag_images_from_gpx_file.py +15 -96
- mapillary_tools/geotag/geotag_images_from_nmea_file.py +4 -2
- mapillary_tools/geotag/geotag_images_from_video.py +46 -46
- mapillary_tools/geotag/geotag_videos_from_exiftool_video.py +98 -92
- mapillary_tools/geotag/geotag_videos_from_gpx.py +140 -0
- mapillary_tools/geotag/geotag_videos_from_video.py +149 -181
- mapillary_tools/geotag/options.py +159 -0
- mapillary_tools/{geotag → gpmf}/gpmf_parser.py +194 -171
- mapillary_tools/history.py +3 -11
- mapillary_tools/mp4/io_utils.py +0 -1
- mapillary_tools/mp4/mp4_sample_parser.py +11 -3
- mapillary_tools/mp4/simple_mp4_parser.py +0 -10
- mapillary_tools/process_geotag_properties.py +151 -386
- mapillary_tools/process_sequence_properties.py +554 -202
- mapillary_tools/sample_video.py +8 -15
- mapillary_tools/telemetry.py +24 -12
- mapillary_tools/types.py +80 -22
- mapillary_tools/upload.py +316 -298
- mapillary_tools/upload_api_v4.py +55 -122
- mapillary_tools/uploader.py +396 -254
- mapillary_tools/utils.py +26 -0
- mapillary_tools/video_data_extraction/extract_video_data.py +17 -36
- mapillary_tools/video_data_extraction/extractors/blackvue_parser.py +34 -19
- mapillary_tools/video_data_extraction/extractors/camm_parser.py +41 -17
- mapillary_tools/video_data_extraction/extractors/exiftool_runtime_parser.py +4 -1
- mapillary_tools/video_data_extraction/extractors/exiftool_xml_parser.py +1 -2
- mapillary_tools/video_data_extraction/extractors/gopro_parser.py +37 -22
- {mapillary_tools-0.13.3a1.dist-info → mapillary_tools-0.14.0a1.dist-info}/METADATA +3 -2
- mapillary_tools-0.14.0a1.dist-info/RECORD +78 -0
- {mapillary_tools-0.13.3a1.dist-info → mapillary_tools-0.14.0a1.dist-info}/WHEEL +1 -1
- mapillary_tools/geotag/utils.py +0 -26
- mapillary_tools-0.13.3a1.dist-info/RECORD +0 -75
- /mapillary_tools/{geotag → gpmf}/gpmf_gps_filter.py +0 -0
- /mapillary_tools/{geotag → gpmf}/gps_filter.py +0 -0
- {mapillary_tools-0.13.3a1.dist-info → mapillary_tools-0.14.0a1.dist-info}/entry_points.txt +0 -0
- {mapillary_tools-0.13.3a1.dist-info → mapillary_tools-0.14.0a1.dist-info/licenses}/LICENSE +0 -0
- {mapillary_tools-0.13.3a1.dist-info → mapillary_tools-0.14.0a1.dist-info}/top_level.txt +0 -0
mapillary_tools/geo.py
CHANGED

@@ -1,10 +1,12 @@
 # pyre-ignore-all-errors[4]
+from __future__ import annotations
 
 import bisect
 import dataclasses
 import datetime
 import itertools
 import math
+import sys
 import typing as T
 
 WGS84_a = 6378137.0
@@ -27,34 +29,14 @@ class Point:
     time: float
     lat: float
     lon: float
-    alt:
-    angle:
+    alt: float | None
+    angle: float | None
 
 
-
-    """
-    Compute ECEF XYZ from latitude, longitude and altitude.
+PointLike = T.TypeVar("PointLike", bound=Point)
 
-    All using the WGS94 model.
-    Altitude is the distance to the WGS94 ellipsoid.
-    Check results here http://www.oc.nps.edu/oc2902w/coord/llhxyz.htm
 
-
-    lat = math.radians(lat)
-    lon = math.radians(lon)
-    cos_lat = math.cos(lat)
-    sin_lat = math.sin(lat)
-    L = 1.0 / math.sqrt(WGS84_a_SQ * cos_lat**2 + WGS84_b_SQ * sin_lat**2)
-    K = WGS84_a_SQ * L * cos_lat
-    x = K * math.cos(lon)
-    y = K * math.sin(lon)
-    z = WGS84_b_SQ * L * sin_lat
-    return x, y, z
-
-
-def gps_distance(
-    latlon_1: T.Tuple[float, float], latlon_2: T.Tuple[float, float]
-) -> float:
+def gps_distance(latlon_1: tuple[float, float], latlon_2: tuple[float, float]) -> float:
     """
     Distance between two (lat,lon) pairs.
 
@@ -69,19 +51,9 @@ def gps_distance(
     return math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2 + (z1 - z2) ** 2)
 
 
-def get_max_distance_from_start(latlons: T.List[T.Tuple[float, float]]) -> float:
-    """
-    Returns the radius of an entire GPS track. Used to calculate whether or not the entire sequence was just stationary video
-    Takes a sequence of points as input
-    """
-    if not latlons:
-        return 0
-    start = latlons[0]
-    return max(gps_distance(start, latlon) for latlon in latlons)
-
-
 def compute_bearing(
-
+    latlon_1: tuple[float, float],
+    latlon_2: tuple[float, float],
 ) -> float:
     """
     Get the compass bearing from start to end.
@@ -89,7 +61,10 @@ def compute_bearing(
     Formula from
     http://www.movable-type.co.uk/scripts/latlong.html
     """
-
+    start_lat, start_lon = latlon_1
+    end_lat, end_lon = latlon_2
+
+    # Make sure everything is in radians
     start_lat = math.radians(start_lat)
     start_lon = math.radians(start_lon)
     end_lat = math.radians(end_lat)
@@ -125,14 +100,14 @@ _IT = T.TypeVar("_IT")
 
 
 # http://stackoverflow.com/a/5434936
-def pairwise(iterable: T.Iterable[_IT]) -> T.Iterable[
+def pairwise(iterable: T.Iterable[_IT]) -> T.Iterable[tuple[_IT, _IT]]:
     """s -> (s0,s1), (s1,s2), (s2, s3), ..."""
     a, b = itertools.tee(iterable)
     next(b, None)
     return zip(a, b)
 
 
-def as_unix_time(dt:
+def as_unix_time(dt: datetime.datetime | int | float) -> float:
     if isinstance(dt, (int, float)):
         return dt
     else:
@@ -148,59 +123,37 @@ def as_unix_time(dt: T.Union[datetime.datetime, int, float]) -> float:
         return 0.0
 
 
-
-    if start.time == end.time:
-        weight = 0.0
-    else:
-        weight = (t - start.time) / (end.time - start.time)
-
-    lat = start.lat + (end.lat - start.lat) * weight
-    lon = start.lon + (end.lon - start.lon) * weight
-    angle = compute_bearing(start.lat, start.lon, end.lat, end.lon)
-    alt: T.Optional[float]
-    if start.alt is not None and end.alt is not None:
-        alt = start.alt + (end.alt - start.alt) * weight
-    else:
-        alt = None
-
-    return Point(time=t, lat=lat, lon=lon, alt=alt, angle=angle)
-
+if sys.version_info < (3, 10):
 
-def
-
-
-
-
-
-    else:
-        if 0 < idx < len(points):
-            # interpolating within the range
-            start, end = points[idx - 1], points[idx]
-        elif idx <= 0:
-            # extrapolating behind the range
-            start, end = points[0], points[1]
-        else:
-            # extrapolating beyond the range
-            assert len(points) <= idx
-            start, end = points[-2], points[-1]
+    def interpolate(points: T.Sequence[Point], t: float, lo: int = 0) -> Point:
+        """
+        Interpolate or extrapolate the point at time t along the sequence of points (sorted by time).
+        """
+        if not points:
+            raise ValueError("Expect non-empty points")
 
-
+        # Make sure that points are sorted (disabled because the check costs O(N)):
+        # for cur, nex in pairwise(points):
+        #     assert cur.time <= nex.time, "Points not sorted"
 
+        p = Point(time=t, lat=float("-inf"), lon=float("-inf"), alt=None, angle=None)
+        idx = bisect.bisect_left(points, p, lo=lo)
+        return _interpolate_at_segment_idx(points, t, idx)
+else:
 
-def interpolate(points: T.Sequence[Point], t: float, lo: int = 0) -> Point:
-
-
-
-
-
+    def interpolate(points: T.Sequence[Point], t: float, lo: int = 0) -> Point:
+        """
+        Interpolate or extrapolate the point at time t along the sequence of points (sorted by time).
+        """
+        if not points:
+            raise ValueError("Expect non-empty points")
 
-
-
-
+        # Make sure that points are sorted (disabled because the check costs O(N)):
+        # for cur, nex in pairwise(points):
+        #     assert cur.time <= nex.time, "Points not sorted"
 
-
-
-    return _interpolate_at_index(points, t, idx)
+        idx = bisect.bisect_left(points, t, lo=lo, key=lambda x: x.time)
+        return _interpolate_at_segment_idx(points, t, idx)
 
 
 class Interpolator:
@@ -212,12 +165,22 @@ class Interpolator:
     track_idx: int
     # interpolation starts from the lower bound point index in the current track
     lo: int
-    prev_time:
+    prev_time: float | None
 
     def __init__(self, tracks: T.Sequence[T.Sequence[Point]]):
+        # Remove empty tracks
         self.tracks = [track for track in tracks if track]
+
         if not self.tracks:
-            raise ValueError("Expect non-empty
+            raise ValueError("Expect at least one non-empty track")
+
+        for track in self.tracks:
+            for left, right in pairwise(track):
+                if not (left.time <= right.time):
+                    raise ValueError(
+                        "Expect points to be sorted by time, but got {left.time} then {right.time}"
+                    )
+
         self.tracks.sort(key=lambda track: track[0].time)
         self.track_idx = 0
         self.lo = 0
@@ -225,7 +188,7 @@ class Interpolator:
 
     @staticmethod
     def _lsearch_left(
-        track: T.Sequence[Point], t: float, lo: int = 0, hi:
+        track: T.Sequence[Point], t: float, lo: int = 0, hi: int | None = None
     ) -> int:
         """
         similar to bisect.bisect_left, but faster in the incremental search case
@@ -244,24 +207,37 @@ class Interpolator:
 
     def interpolate(self, t: float) -> Point:
         if self.prev_time is not None:
-
+            if not (self.prev_time <= t):
+                raise ValueError(
+                    f"Require times to be monotonically increasing, but got {self.prev_time} then {t}"
+                )
+
+        interpolated: Point | None = None
 
         while self.track_idx < len(self.tracks):
            track = self.tracks[self.track_idx]
+            assert track, "expect non-empty track"
+
             if t < track[0].time:
-
+                interpolated = _interpolate_at_segment_idx(track, t, 0)
+                break
+
             elif track[0].time <= t <= track[-1].time:
-                #
+                # Similar to bisect.bisect_left(points, p, lo=lo) but faster in this case
                 idx = Interpolator._lsearch_left(track, t, lo=self.lo)
-                # t must
-                #
-                # because the next t can still be interpolated anywhere between (track[idx - 1], track[idx]]
+                # Time t must be between (track[idx - 1], track[idx]], so set the lower bound to idx - 1
+                # Because the next t can still be interpolated anywhere between (track[idx - 1], track[idx]]
                 self.lo = max(idx - 1, 0)
-
+                interpolated = _interpolate_at_segment_idx(track, t, idx)
+                break
+
             self.track_idx += 1
             self.lo = 0
 
-        interpolated
+        if interpolated is None:
+            interpolated = _interpolate_at_segment_idx(
+                self.tracks[-1], t, len(self.tracks[-1])
+            )
 
         self.prev_time = t
 
@@ -276,7 +252,7 @@ def sample_points_by_distance(
     min_distance: float,
     point_func: T.Callable[[_PointAbstract], Point],
 ) -> T.Generator[_PointAbstract, None, None]:
-    prevp:
+    prevp: Point | None = None
     for sample in samples:
         if prevp is None:
             yield sample
@@ -288,26 +264,27 @@ def sample_points_by_distance(
             prevp = p
 
 
-def interpolate_directions_if_none(sequence: T.Sequence[
+def interpolate_directions_if_none(sequence: T.Sequence[PointLike]) -> None:
     for cur, nex in pairwise(sequence):
         if cur.angle is None:
-            cur.angle = compute_bearing(cur.lat, cur.lon, nex.lat, nex.lon)
+            cur.angle = compute_bearing((cur.lat, cur.lon), (nex.lat, nex.lon))
 
     if len(sequence) == 1:
         if sequence[-1].angle is None:
             sequence[-1].angle = 0
     elif 2 <= len(sequence):
         if sequence[-1].angle is None:
-
-
-
-
+            prev_angle = sequence[-2].angle
+            assert prev_angle is not None, (
+                "expect the last second point to have an interpolated angle"
+            )
+            sequence[-1].angle = prev_angle
 
 
 def extend_deduplicate_points(
-    sequence: T.Iterable[
-    to_extend:
-) ->
+    sequence: T.Iterable[PointLike],
+    to_extend: list[PointLike] | None = None,
+) -> list[PointLike]:
     if to_extend is None:
         to_extend = []
     for point in sequence:
@@ -319,3 +296,67 @@ def extend_deduplicate_points(
         else:
             to_extend.append(point)
     return to_extend
+
+
+def _ecef_from_lla2(lat: float, lon: float) -> tuple[float, float, float]:
+    """
+    Compute ECEF XYZ from latitude and longitude.
+
+    All using the WGS94 model.
+    Altitude is the distance to the WGS94 ellipsoid.
+    Check results here http://www.oc.nps.edu/oc2902w/coord/llhxyz.htm
+
+    """
+    lat = math.radians(lat)
+    lon = math.radians(lon)
+    cos_lat = math.cos(lat)
+    sin_lat = math.sin(lat)
+    L = 1.0 / math.sqrt(WGS84_a_SQ * cos_lat**2 + WGS84_b_SQ * sin_lat**2)
+    K = WGS84_a_SQ * L * cos_lat
+    x = K * math.cos(lon)
+    y = K * math.sin(lon)
+    z = WGS84_b_SQ * L * sin_lat
+    return x, y, z
+
+
+def _interpolate_segment(start: Point, end: Point, t: float) -> Point:
+    try:
+        weight = (t - start.time) / (end.time - start.time)
+    except ZeroDivisionError:
+        weight = 0.0
+
+    lat = start.lat + (end.lat - start.lat) * weight
+    lon = start.lon + (end.lon - start.lon) * weight
+    angle = compute_bearing((start.lat, start.lon), (end.lat, end.lon))
+    alt: float | None
+    if start.alt is not None and end.alt is not None:
+        alt = start.alt + (end.alt - start.alt) * weight
+    else:
+        alt = None
+
+    return Point(time=t, lat=lat, lon=lon, alt=alt, angle=angle)
+
+
+def _interpolate_at_segment_idx(points: T.Sequence[Point], t: float, idx: int) -> Point:
+    """
+    Interpolate time t along the segment between idx - 1 and idx.
+    If idx is out of range, extrapolate it to the nearest segment (first or last).
+    """
+
+    if len(points) == 1:
+        start, end = points[0], points[0]
+    elif 2 <= len(points):
+        if 0 < idx < len(points):
+            # Normal interpolation within the range
+            start, end = points[idx - 1], points[idx]
+        elif idx <= 0:
+            # Extrapolating before the first point
+            start, end = points[0], points[1]
+        else:
+            # Extrapolating after the last point
+            assert len(points) <= idx
+            start, end = points[-2], points[-1]
+    else:
+        assert False, "expect non-empty points"
+
+    return _interpolate_segment(start, end, t)
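
For context, a minimal sketch of how the reworked interpolation API in geo.py can be exercised (a hypothetical snippet written against the 0.14.0a1 code shown above; the Point values are made up for illustration):

    from mapillary_tools import geo

    track = [
        geo.Point(time=0.0, lat=0.0, lon=0.0, alt=None, angle=None),
        geo.Point(time=10.0, lat=0.001, lon=0.001, alt=10.0, angle=None),
    ]

    # The module-level interpolate() interpolates inside [0, 10] and extrapolates
    # along the nearest segment outside it (see _interpolate_at_segment_idx above).
    print(geo.interpolate(track, 5.0))
    print(geo.interpolate(track, 12.0))

    # Interpolator.interpolate() requires non-decreasing query times and raises
    # ValueError otherwise, per the check shown above.
    interp = geo.Interpolator([track])
    for t in (0.0, 2.5, 7.5, 12.0):
        print(t, interp.interpolate(t))
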
mapillary_tools/geotag/factory.py
ADDED

@@ -0,0 +1,298 @@
+from __future__ import annotations
+
+import json
+import logging
+import typing as T
+from pathlib import Path
+
+from .. import exceptions, types, utils
+from ..types import FileType
+from . import (
+    geotag_from_generic,
+    geotag_images_from_exif,
+    geotag_images_from_exiftool,
+    geotag_images_from_exiftool_both_image_and_video,
+    geotag_images_from_gpx_file,
+    geotag_images_from_nmea_file,
+    geotag_images_from_video,
+    geotag_videos_from_exiftool_video,
+    geotag_videos_from_gpx,
+    geotag_videos_from_video,
+)
+from .options import InterpolationOption, SOURCE_TYPE_ALIAS, SourceOption, SourceType
+
+
+LOG = logging.getLogger(__name__)
+
+
+def parse_source_option(source: str) -> list[SourceOption]:
+    """
+    Given a source string, parse it into a list of GeotagOptions objects.
+
+    Examples:
+        "native" -> [SourceOption(SourceType.NATIVE)]
+        "gpx,exif" -> [SourceOption(SourceType.GPX), SourceOption(SourceType.EXIF)]
+        "exif,gpx" -> [SourceOption(SourceType.EXIF), SourceOption(SourceType.GPX)]
+        '{"source": "gpx"}' -> [SourceOption(SourceType.GPX)]
+    """
+
+    try:
+        source_type = SourceType(SOURCE_TYPE_ALIAS.get(source, source))
+    except ValueError:
+        pass
+    else:
+        return [SourceOption(source_type)]
+
+    try:
+        payload = json.loads(source)
+    except json.JSONDecodeError:
+        pass
+    else:
+        return [SourceOption.from_dict(payload)]
+
+    sources = source.split(",")
+
+    return [SourceOption(SourceType(SOURCE_TYPE_ALIAS.get(s, s))) for s in sources]
+
+
+def process(
+    # Collection: ABC for sized iterable container classes
+    paths: T.Iterable[Path],
+    options: T.Collection[SourceOption],
+) -> list[types.MetadataOrError]:
+    if not options:
+        raise ValueError("No geotag options provided")
+
+    final_metadatas: list[types.MetadataOrError] = []
+
+    # Paths (image path or video path) that will be sent to the next geotag process
+    reprocessable_paths = set(paths)
+
+    for idx, option in enumerate(options):
+        LOG.debug("Processing %d files with %s", len(reprocessable_paths), option)
+
+        image_metadata_or_errors = _geotag_images(reprocessable_paths, option)
+        video_metadata_or_errors = _geotag_videos(reprocessable_paths, option)
+
+        more_option = idx < len(options) - 1
+
+        for metadata in image_metadata_or_errors + video_metadata_or_errors:
+            if more_option and _is_reprocessable(metadata):
+                # Leave what it is for the next geotag process
+                pass
+            else:
+                final_metadatas.append(metadata)
+                reprocessable_paths.remove(metadata.filename)
+
+        # Quit if no more paths to process
+        if not reprocessable_paths:
+            break
+
+    return final_metadatas
+
+
+def _is_reprocessable(metadata: types.MetadataOrError) -> bool:
+    if isinstance(metadata, types.ErrorMetadata):
+        if isinstance(
+            metadata.error,
+            (
+                exceptions.MapillaryGeoTaggingError,
+                exceptions.MapillaryVideoGPSNotFoundError,
+            ),
+        ):
+            return True
+
+    return False
+
+
+def _filter_images_and_videos(
+    file_paths: T.Iterable[Path],
+    filetypes: set[types.FileType] | None = None,
+) -> tuple[list[Path], list[Path]]:
+    image_paths = []
+    video_paths = []
+
+    ALL_VIDEO_TYPES = {types.FileType.VIDEO, *types.NATIVE_VIDEO_FILETYPES}
+
+    if filetypes is None:
+        include_images = True
+        include_videos = True
+    else:
+        include_images = types.FileType.IMAGE in filetypes
+        include_videos = bool(filetypes & ALL_VIDEO_TYPES)
+
+    for path in file_paths:
+        if utils.is_image_file(path):
+            if include_images:
+                image_paths.append(path)
+
+        elif utils.is_video_file(path):
+            if include_videos:
+                video_paths.append(path)
+
+    return image_paths, video_paths
+
+
+def _ensure_source_path(option: SourceOption) -> Path:
+    if option.source_path is None or option.source_path.source_path is None:
+        raise exceptions.MapillaryBadParameterError(
+            f"source_path must be provided for {option.source}"
+        )
+    return option.source_path.source_path
+
+
+def _geotag_images(
+    paths: T.Iterable[Path], option: SourceOption
+) -> list[types.ImageMetadataOrError]:
+    image_paths, _ = _filter_images_and_videos(paths, option.filetypes)
+
+    if not image_paths:
+        return []
+
+    if option.interpolation is None:
+        interpolation = InterpolationOption()
+    else:
+        interpolation = option.interpolation
+
+    geotag: geotag_from_generic.GeotagImagesFromGeneric
+
+    if option.source is SourceType.NATIVE:
+        geotag = geotag_images_from_exif.GeotagImagesFromEXIF(
+            image_paths, num_processes=option.num_processes
+        )
+        return geotag.to_description()
+
+    if option.source is SourceType.EXIFTOOL_RUNTIME:
+        geotag = geotag_images_from_exiftool.GeotagImagesFromExifToolRunner(
+            image_paths, num_processes=option.num_processes
+        )
+        try:
+            return geotag.to_description()
+        except exceptions.MapillaryExiftoolNotFoundError as ex:
+            LOG.warning('Skip "%s" because: %s', option.source.value, ex)
+            return []
+
+    elif option.source is SourceType.EXIFTOOL_XML:
+        # This is to ensure 'video_process --geotag={"source": "exiftool_xml", "source_path": "/tmp/xml_path"}'
+        # to work
+        geotag = geotag_images_from_exiftool_both_image_and_video.GeotagImagesFromExifToolBothImageAndVideo(
+            image_paths,
+            xml_path=_ensure_source_path(option),
+            num_processes=option.num_processes,
+        )
+        return geotag.to_description()
+
+    elif option.source is SourceType.GPX:
+        geotag = geotag_images_from_gpx_file.GeotagImagesFromGPXFile(
+            image_paths,
+            source_path=_ensure_source_path(option),
+            use_gpx_start_time=interpolation.use_gpx_start_time,
+            offset_time=interpolation.offset_time,
+            num_processes=option.num_processes,
+        )
+        return geotag.to_description()
+
+    elif option.source is SourceType.NMEA:
+        geotag = geotag_images_from_nmea_file.GeotagImagesFromNMEAFile(
+            image_paths,
+            source_path=_ensure_source_path(option),
+            use_gpx_start_time=interpolation.use_gpx_start_time,
+            offset_time=interpolation.offset_time,
+            num_processes=option.num_processes,
+        )
+
+        return geotag.to_description()
+
+    elif option.source is SourceType.EXIF:
+        geotag = geotag_images_from_exif.GeotagImagesFromEXIF(
+            image_paths, num_processes=option.num_processes
+        )
+        return geotag.to_description()
+
+    elif option.source in [
+        SourceType.GOPRO,
+        SourceType.BLACKVUE,
+        SourceType.CAMM,
+    ]:
+        map_geotag_source_to_filetype: dict[SourceType, FileType] = {
+            SourceType.GOPRO: FileType.GOPRO,
+            SourceType.BLACKVUE: FileType.BLACKVUE,
+            SourceType.CAMM: FileType.CAMM,
+        }
+        video_paths = utils.find_videos([_ensure_source_path(option)])
+        image_samples_by_video_path = utils.find_all_image_samples(
+            image_paths, video_paths
+        )
+        video_paths_with_image_samples = list(image_samples_by_video_path.keys())
+        video_metadatas = geotag_videos_from_video.GeotagVideosFromVideo(
+            video_paths_with_image_samples,
+            filetypes={map_geotag_source_to_filetype[option.source]},
+            num_processes=option.num_processes,
+        ).to_description()
+        geotag = geotag_images_from_video.GeotagImagesFromVideo(
+            image_paths,
+            video_metadatas,
+            offset_time=interpolation.offset_time,
+            num_processes=option.num_processes,
+        )
+        return geotag.to_description()
+
+    else:
+        raise ValueError(f"Invalid geotag source {option.source}")
+
+
+def _geotag_videos(
+    paths: T.Iterable[Path], option: SourceOption
+) -> list[types.VideoMetadataOrError]:
+    _, video_paths = _filter_images_and_videos(paths, option.filetypes)
+
+    if not video_paths:
+        return []
+
+    geotag: geotag_from_generic.GeotagVideosFromGeneric
+
+    if option.source is SourceType.NATIVE:
+        geotag = geotag_videos_from_video.GeotagVideosFromVideo(
+            video_paths, num_processes=option.num_processes, filetypes=option.filetypes
+        )
+        return geotag.to_description()
+
+    if option.source is SourceType.EXIFTOOL_RUNTIME:
+        geotag = geotag_videos_from_exiftool_video.GeotagVideosFromExifToolRunner(
+            video_paths, num_processes=option.num_processes
+        )
+        try:
+            return geotag.to_description()
+        except exceptions.MapillaryExiftoolNotFoundError as ex:
+            LOG.warning('Skip "%s" because: %s', option.source.value, ex)
+            return []
+
+    elif option.source is SourceType.EXIFTOOL_XML:
+        geotag = geotag_videos_from_exiftool_video.GeotagVideosFromExifToolVideo(
+            video_paths,
+            xml_path=_ensure_source_path(option),
+        )
+        return geotag.to_description()
+
+    elif option.source is SourceType.GPX:
+        geotag = geotag_videos_from_gpx.GeotagVideosFromGPX(video_paths)
+        return geotag.to_description()
+
+    elif option.source is SourceType.NMEA:
+        # TODO: geotag videos from NMEA
+        return []
+
+    elif option.source is SourceType.EXIF:
+        # Legacy image-specific geotag types
+        return []
+
+    elif option.source in [
+        SourceType.GOPRO,
+        SourceType.BLACKVUE,
+        SourceType.CAMM,
+    ]:
+        # Legacy image-specific geotag types
+        return []
+
+    else:
+        raise ValueError(f"Invalid geotag source {option.source}")