mapillary-tools 0.14.0a2__py3-none-any.whl → 0.14.0b1__py3-none-any.whl
- mapillary_tools/__init__.py +1 -1
- mapillary_tools/api_v4.py +1 -0
- mapillary_tools/authenticate.py +9 -9
- mapillary_tools/blackvue_parser.py +79 -22
- mapillary_tools/config.py +38 -17
- mapillary_tools/constants.py +2 -0
- mapillary_tools/exiftool_read_video.py +52 -15
- mapillary_tools/exiftool_runner.py +4 -24
- mapillary_tools/ffmpeg.py +406 -232
- mapillary_tools/geotag/__init__.py +0 -0
- mapillary_tools/geotag/base.py +2 -2
- mapillary_tools/geotag/factory.py +97 -88
- mapillary_tools/geotag/geotag_images_from_exiftool.py +26 -19
- mapillary_tools/geotag/geotag_images_from_gpx.py +13 -6
- mapillary_tools/geotag/geotag_images_from_video.py +35 -0
- mapillary_tools/geotag/geotag_videos_from_exiftool.py +39 -13
- mapillary_tools/geotag/geotag_videos_from_gpx.py +22 -9
- mapillary_tools/geotag/options.py +25 -3
- mapillary_tools/geotag/video_extractors/base.py +1 -1
- mapillary_tools/geotag/video_extractors/exiftool.py +1 -1
- mapillary_tools/geotag/video_extractors/gpx.py +60 -70
- mapillary_tools/geotag/video_extractors/native.py +9 -31
- mapillary_tools/history.py +4 -1
- mapillary_tools/process_geotag_properties.py +16 -8
- mapillary_tools/process_sequence_properties.py +9 -11
- mapillary_tools/sample_video.py +7 -6
- mapillary_tools/serializer/description.py +587 -0
- mapillary_tools/serializer/gpx.py +132 -0
- mapillary_tools/types.py +44 -610
- mapillary_tools/upload.py +176 -197
- mapillary_tools/upload_api_v4.py +94 -51
- mapillary_tools/uploader.py +284 -138
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/METADATA +87 -31
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/RECORD +38 -35
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/WHEEL +1 -1
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/entry_points.txt +0 -0
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/licenses/LICENSE +0 -0
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/top_level.txt +0 -0
@@ -60,9 +60,13 @@ class SourceOption:
             elif k == "filetypes":
                 kwargs[k] = {types.FileType(t) for t in v}
             elif k == "source_path":
-                kwargs.setdefault(
+                kwargs.setdefault(
+                    "source_path", SourcePathOption(source_path=Path(v))
+                ).source_path = Path(v)
             elif k == "pattern":
-                kwargs.setdefault(
+                kwargs.setdefault(
+                    "source_path", SourcePathOption(pattern=v)
+                ).pattern = v
             elif k == "interpolation_offset_time":
                 kwargs.setdefault(
                     "interpolation", InterpolationOption()
@@ -85,6 +89,24 @@ class SourcePathOption:
             raise ValueError("Either pattern or source_path must be provided")

     def resolve(self, path: Path) -> Path:
+        """
+        Resolve the source path or pattern against the given path.
+
+        Examples:
+        >>> from pathlib import Path
+        >>> opt = SourcePathOption(source_path=Path("/foo/bar.mp4"))
+        >>> opt.resolve(Path("/baz/qux.mp4"))
+        PosixPath('/foo/bar.mp4')
+
+        >>> opt = SourcePathOption(pattern="videos/%g_sub%e")
+        >>> opt.resolve(Path("/data/video1.mp4"))
+        PosixPath('/data/videos/video1_sub.mp4')
+
+        >>> opt = SourcePathOption(pattern="/abs/path/%f")
+        >>> opt.resolve(Path("/tmp/abc.mov"))
+        PosixPath('/abs/path/abc.mov')
+        """
+
         if self.source_path is not None:
             return self.source_path

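The new doctests pin down `SourcePathOption.resolve` for both modes: a fixed `source_path` always wins, while a `%`-pattern is expanded and, when relative, resolved next to the input video. Below is a rough, illustrative reimplementation of the placeholder semantics the doctests imply (`%f` = file name, `%g` = stem, `%e` = suffix); it is not the code shipped in the wheel.

```python
from pathlib import Path


def resolve_pattern(pattern: str, path: Path) -> Path:
    """Approximate the %-placeholder expansion suggested by the doctests above."""
    expanded = (
        pattern.replace("%f", path.name)   # full file name, e.g. "video1.mp4"
        .replace("%g", path.stem)          # stem, e.g. "video1"
        .replace("%e", path.suffix)        # suffix, e.g. ".mp4"
    )
    resolved = Path(expanded)
    if not resolved.is_absolute():
        resolved = path.parent / resolved  # relative patterns resolve beside the input
    return resolved


assert resolve_pattern("videos/%g_sub%e", Path("/data/video1.mp4")) == Path(
    "/data/videos/video1_sub.mp4"
)
assert resolve_pattern("/abs/path/%f", Path("/tmp/abc.mov")) == Path("/abs/path/abc.mov")
```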
@@ -140,7 +162,7 @@ SourceOptionSchema = {
             "type": "integer",
         },
         "interpolation_offset_time": {
-            "type": "
+            "type": "number",
         },
         "interpolation_use_gpx_start_time": {
             "type": "boolean",
@@ -21,7 +21,7 @@ class VideoExifToolExtractor(BaseVideoExtractor):
         self.element = element

     @override
-    def extract(self) -> types.
+    def extract(self) -> types.VideoMetadata:
         exif = exiftool_read_video.ExifToolReadVideo(ET.ElementTree(self.element))

         make = exif.extract_make()
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import dataclasses
-import
+import enum
 import logging
 import sys
 import typing as T
@@ -12,7 +12,7 @@ if sys.version_info >= (3, 12):
 else:
     from typing_extensions import override

-from ... import geo, telemetry, types
+from ... import exceptions, geo, telemetry, types
 from ..utils import parse_gpx
 from .base import BaseVideoExtractor
 from .native import NativeVideoExtractor
@@ -21,106 +21,96 @@ from .native import NativeVideoExtractor
 LOG = logging.getLogger(__name__)


+class SyncMode(enum.Enum):
+    # Sync by video GPS timestamps if found, otherwise rebase
+    SYNC = "sync"
+    # Sync by video GPS timestamps, and throw if not found
+    STRICT_SYNC = "strict_sync"
+    # Rebase all GPX timestamps to start from 0
+    REBASE = "rebase"
+
+
 class GPXVideoExtractor(BaseVideoExtractor):
-    def __init__(
+    def __init__(
+        self, video_path: Path, gpx_path: Path, sync_mode: SyncMode = SyncMode.SYNC
+    ):
         self.video_path = video_path
         self.gpx_path = gpx_path
+        self.sync_mode = sync_mode

     @override
-    def extract(self) -> types.
-
-            gpx_tracks = parse_gpx(self.gpx_path)
-        except Exception as ex:
-            raise RuntimeError(
-                f"Error parsing GPX {self.gpx_path}: {ex.__class__.__name__}: {ex}"
-            )
+    def extract(self) -> types.VideoMetadata:
+        gpx_tracks = parse_gpx(self.gpx_path)

         if 1 < len(gpx_tracks):
             LOG.warning(
-                "Found
-                len(gpx_tracks),
-                self.gpx_path,
+                f"Found {len(gpx_tracks)} tracks in the GPX file {self.gpx_path}. Will merge points in all the tracks as a single track for interpolation"
             )

         gpx_points: T.Sequence[geo.Point] = sum(gpx_tracks, [])

         native_extractor = NativeVideoExtractor(self.video_path)

-
-
-
+        try:
+            native_video_metadata = native_extractor.extract()
+        except exceptions.MapillaryVideoGPSNotFoundError as ex:
+            if self.sync_mode is SyncMode.STRICT_SYNC:
+                raise ex
             self._rebase_times(gpx_points)
             return types.VideoMetadata(
-                filename=
-                filetype=
+                filename=self.video_path,
+                filetype=types.FileType.VIDEO,
                 points=gpx_points,
             )

-
-
-
-            gpx_points,
-
-
-        self._rebase_times(gpx_points, offset=offset)
+        if self.sync_mode is SyncMode.REBASE:
+            self._rebase_times(gpx_points)
+        else:
+            offset = self._gpx_offset(gpx_points, native_video_metadata.points)
+            self._rebase_times(gpx_points, offset=offset)

-        return dataclasses.replace(
+        return dataclasses.replace(native_video_metadata, points=gpx_points)

-    @
-    def _rebase_times(points: T.Sequence[geo.Point], offset: float = 0.0):
+    @classmethod
+    def _rebase_times(cls, points: T.Sequence[geo.Point], offset: float = 0.0) -> None:
         """
-
+        Rebase point times to start from **offset**
         """
         if points:
             first_timestamp = points[0].time
             for p in points:
                 p.time = (p.time - first_timestamp) + offset
-        return points

-
-
+    @classmethod
+    def _gpx_offset(
+        cls, gpx_points: T.Sequence[geo.Point], video_gps_points: T.Sequence[geo.Point]
     ) -> float:
+        """
+        Calculate the offset that needs to be applied to the GPX points to sync with the video GPS points.
+
+        >>> gpx_points = [geo.Point(time=5, lat=1, lon=1, alt=None, angle=None)]
+        >>> GPXVideoExtractor._gpx_offset(gpx_points, gpx_points)
+        0.0
+        >>> GPXVideoExtractor._gpx_offset(gpx_points, [])
+        0.0
+        >>> GPXVideoExtractor._gpx_offset([], gpx_points)
+        0.0
+        """
         offset: float = 0.0

-        if not gpx_points:
-            return offset
-
-        first_gpx_dt = datetime.datetime.fromtimestamp(
-            gpx_points[0].time, tz=datetime.timezone.utc
-        )
-        LOG.info("First GPX timestamp: %s", first_gpx_dt)
-
-        if not video_gps_points:
-            LOG.warning(
-                "Skip GPX synchronization because no GPS found in video %s",
-                self.video_path,
-            )
+        if not gpx_points or not video_gps_points:
             return offset

-
-
-
-
-
-
-
-
-
-
-
-                first_gpx_dt,
-                first_gps_dt,
-                offset,
-            )
-        else:
-            LOG.info(
-                "GPX and GPS are perfectly synchronized (all starts from %s)",
-                first_gpx_dt,
-            )
-        else:
-            LOG.warning(
-                "Skip GPX synchronization because no GPS epoch time found in video %s",
-                self.video_path,
-            )
+        gps_epoch_time: float | None = None
+        gps_point = video_gps_points[0]
+        if isinstance(gps_point, telemetry.GPSPoint):
+            if gps_point.epoch_time is not None:
+                gps_epoch_time = gps_point.epoch_time
+        elif isinstance(gps_point, telemetry.CAMMGPSPoint):
+            if gps_point.time_gps_epoch is not None:
+                gps_epoch_time = gps_point.time_gps_epoch
+
+        if gps_epoch_time is not None:
+            offset = gpx_points[0].time - gps_epoch_time

         return offset
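The new `SyncMode` controls how GPX timestamps are aligned with the video's own GPS track: `SYNC` uses the video's GPS epoch time when available and otherwise rebases to zero, `STRICT_SYNC` raises when the video has no usable GPS, and `REBASE` always starts from zero. Below is a minimal, self-contained sketch of the underlying arithmetic, using a stand-in `Point` type (the library itself works with `geo.Point` and telemetry GPS points).

```python
from dataclasses import dataclass


@dataclass
class Point:
    time: float  # absolute epoch seconds before rebasing


def rebase_times(points: list[Point], offset: float = 0.0) -> None:
    """Shift times so the first point lands at `offset` on the video timeline."""
    if points:
        first = points[0].time
        for p in points:
            p.time = (p.time - first) + offset


def gpx_offset(gpx_start: float, video_gps_epoch: float | None) -> float:
    """Offset of the GPX track start relative to the video's first GPS fix."""
    if video_gps_epoch is None:
        return 0.0  # SYNC falls back to plain rebasing; STRICT_SYNC raises instead
    return gpx_start - video_gps_epoch


# The GPX recording starts 2.5 s after the video's first GPS fix, so its samples
# land at 2.5 s, 3.5 s, ... on the video timeline after rebasing.
gpx = [Point(1_700_000_002.5), Point(1_700_000_003.5)]
rebase_times(gpx, offset=gpx_offset(gpx[0].time, video_gps_epoch=1_700_000_000.0))
assert [p.time for p in gpx] == [2.5, 3.5]
```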
@@ -17,7 +17,7 @@ from .base import BaseVideoExtractor

 class GoProVideoExtractor(BaseVideoExtractor):
     @override
-    def extract(self) -> types.
+    def extract(self) -> types.VideoMetadata:
         with self.video_path.open("rb") as fp:
             gopro_info = gpmf_parser.extract_gopro_info(fp)

@@ -29,23 +29,13 @@ class GoProVideoExtractor(BaseVideoExtractor):
         gps_points = gopro_info.gps
         assert gps_points is not None, "must have GPS data extracted"
         if not gps_points:
-
-            ex: exceptions.MapillaryDescriptionError = (
-                exceptions.MapillaryGPXEmptyError("Empty GPS data found")
-            )
-            return types.describe_error_metadata(
-                ex, self.video_path, filetype=types.FileType.GOPRO
-            )
+            raise exceptions.MapillaryGPXEmptyError("Empty GPS data found")

         gps_points = T.cast(
             T.List[telemetry.GPSPoint], gpmf_gps_filter.remove_noisy_points(gps_points)
         )
         if not gps_points:
-
-            ex = exceptions.MapillaryGPSNoiseError("GPS is too noisy")
-            return types.describe_error_metadata(
-                ex, self.video_path, filetype=types.FileType.GOPRO
-            )
+            raise exceptions.MapillaryGPSNoiseError("GPS is too noisy")

         video_metadata = types.VideoMetadata(
             filename=self.video_path,
@@ -61,7 +51,7 @@ class GoProVideoExtractor(BaseVideoExtractor):

 class CAMMVideoExtractor(BaseVideoExtractor):
     @override
-    def extract(self) -> types.
+    def extract(self) -> types.VideoMetadata:
         with self.video_path.open("rb") as fp:
             camm_info = camm_parser.extract_camm_info(fp)

@@ -71,13 +61,7 @@ class CAMMVideoExtractor(BaseVideoExtractor):
         )

         if not camm_info.gps and not camm_info.mini_gps:
-
-            ex: exceptions.MapillaryDescriptionError = (
-                exceptions.MapillaryGPXEmptyError("Empty GPS data found")
-            )
-            return types.describe_error_metadata(
-                ex, self.video_path, filetype=types.FileType.CAMM
-            )
+            raise exceptions.MapillaryGPXEmptyError("Empty GPS data found")

         return types.VideoMetadata(
             filename=self.video_path,
@@ -91,7 +75,7 @@ class CAMMVideoExtractor(BaseVideoExtractor):

 class BlackVueVideoExtractor(BaseVideoExtractor):
     @override
-    def extract(self) -> types.
+    def extract(self) -> types.VideoMetadata:
         with self.video_path.open("rb") as fp:
             blackvue_info = blackvue_parser.extract_blackvue_info(fp)

@@ -101,19 +85,13 @@ class BlackVueVideoExtractor(BaseVideoExtractor):
         )

         if not blackvue_info.gps:
-
-            ex: exceptions.MapillaryDescriptionError = (
-                exceptions.MapillaryGPXEmptyError("Empty GPS data found")
-            )
-            return types.describe_error_metadata(
-                ex, self.video_path, filetype=types.FileType.BLACKVUE
-            )
+            raise exceptions.MapillaryGPXEmptyError("Empty GPS data found")

         video_metadata = types.VideoMetadata(
             filename=self.video_path,
             filesize=utils.get_file_size(self.video_path),
             filetype=types.FileType.BLACKVUE,
-            points=blackvue_info.gps
+            points=blackvue_info.gps,
             make=blackvue_info.make,
             model=blackvue_info.model,
         )
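Across GoPro, CAMM, and BlackVue, the native extractors now raise `MapillaryGPXEmptyError` / `MapillaryGPSNoiseError` instead of building error metadata themselves, so converting failures into error metadata moves to the caller. A hypothetical caller-side wrapper (not the library's actual factory code) might look like this:

```python
from mapillary_tools import exceptions, types


def extract_or_error(extractor, video_path, filetype):
    """Hypothetical helper: turn extraction failures into error metadata."""
    try:
        return extractor.extract()
    except exceptions.MapillaryDescriptionError as ex:
        # Same conversion the extractors used to perform inline (see removed code above).
        return types.describe_error_metadata(ex, video_path, filetype=filetype)
```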
@@ -127,7 +105,7 @@ class NativeVideoExtractor(BaseVideoExtractor):
         self.filetypes = filetypes

     @override
-    def extract(self) -> types.
+    def extract(self) -> types.VideoMetadata:
         ft = self.filetypes
         extractor: BaseVideoExtractor

mapillary_tools/history.py CHANGED
@@ -7,6 +7,7 @@ import typing as T
 from pathlib import Path

 from . import constants, types
+from .serializer.description import DescriptionJSONSerializer

 JSONDict = T.Dict[str, T.Union[str, int, float, None]]

@@ -57,6 +58,8 @@ def write_history(
         "summary": summary,
     }
     if metadatas is not None:
-        history["descs"] = [
+        history["descs"] = [
+            DescriptionJSONSerializer.as_desc(metadata) for metadata in metadatas
+        ]
     with open(path, "w") as fp:
         fp.write(json.dumps(history))
@@ -2,7 +2,6 @@ from __future__ import annotations

 import collections
 import datetime
-import json
 import logging
 import typing as T
 from pathlib import Path
@@ -17,6 +16,11 @@ from .geotag.options import (
     SourcePathOption,
     SourceType,
 )
+from .serializer.description import (
+    DescriptionJSONSerializer,
+    validate_and_fail_metadata,
+)
+from .serializer.gpx import GPXSerializer

 LOG = logging.getLogger(__name__)
 DEFAULT_GEOTAG_SOURCE_OPTIONS = [
@@ -200,12 +204,16 @@ def _write_metadatas(
     desc_path: str,
 ) -> None:
     if desc_path == "-":
-        descs =
-        print(
+        descs = DescriptionJSONSerializer.serialize(metadatas)
+        print(descs.decode("utf-8"))
     else:
-
-
-
+        normalized_suffix = Path(desc_path).suffix.strip().lower()
+        if normalized_suffix in [".gpx"]:
+            descs = GPXSerializer.serialize(metadatas)
+        else:
+            descs = DescriptionJSONSerializer.serialize(metadatas)
+        with open(desc_path, "wb") as fp:
+            fp.write(descs)
     LOG.info("Check the description file for details: %s", desc_path)


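`_write_metadatas` now dispatches on the output suffix: a `.gpx` path goes through the new `GPXSerializer`, anything else (including `-` for stdout) through `DescriptionJSONSerializer`; both return bytes. A hypothetical standalone helper mirroring that dispatch:

```python
from pathlib import Path

from mapillary_tools.serializer.description import DescriptionJSONSerializer
from mapillary_tools.serializer.gpx import GPXSerializer


def serialize_for(desc_path: str, metadatas) -> bytes:
    """Hypothetical helper: pick the serializer the same way _write_metadatas does."""
    if Path(desc_path).suffix.strip().lower() == ".gpx":
        return GPXSerializer.serialize(metadatas)
    return DescriptionJSONSerializer.serialize(metadatas)


# descs = serialize_for("out.gpx", metadatas)  # bytes; write with open(..., "wb")
```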
@@ -293,7 +301,7 @@ def _validate_metadatas(
     # See https://stackoverflow.com/a/61432070
     good_metadatas, error_metadatas = types.separate_errors(metadatas)
     map_results = utils.mp_map_maybe(
-
+        validate_and_fail_metadata,
         T.cast(T.Iterable[types.Metadata], good_metadatas),
         num_processes=num_processes,
     )
@@ -308,7 +316,7 @@ def _validate_metadatas(
         )
     )

-    return validated_metadatas + error_metadatas
+    return T.cast(list[types.MetadataOrError], validated_metadatas + error_metadatas)


 def process_finalize(
@@ -7,6 +7,7 @@ import os
 import typing as T

 from . import constants, exceptions, geo, types, utils
+from .serializer.description import DescriptionJSONSerializer

 LOG = logging.getLogger(__name__)

@@ -24,24 +25,21 @@ def split_sequence_by(
     """
     output_sequences: list[list[SeqItem]] = []

-
-
-    prev = next(seq, None)
-    if prev is None:
-        return output_sequences
-
-    output_sequences.append([prev])
+    if sequence:
+        output_sequences.append([sequence[0]])

-    for cur in
+    for prev, cur in geo.pairwise(sequence):
         # invariant: prev is processed
         if should_split(prev, cur):
             output_sequences.append([cur])
         else:
             output_sequences[-1].append(cur)
-        prev = cur
         # invariant: cur is processed

-    assert sum(len(s) for s in output_sequences) == len(sequence)
+    assert sum(len(s) for s in output_sequences) == len(sequence), (
+        output_sequences,
+        sequence,
+    )

     return output_sequences

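The rewritten `split_sequence_by` drops the manual iterator bookkeeping (`prev = next(seq, None)` … `prev = cur`) and instead seeds the first group from `sequence[0]` and walks adjacent pairs with `geo.pairwise`. The same logic, sketched self-contained with `itertools.pairwise` standing in for `geo.pairwise`:

```python
from itertools import pairwise  # Python 3.10+; stands in for geo.pairwise here
from typing import Callable, List, Sequence, TypeVar

SeqItem = TypeVar("SeqItem")


def split_sequence_by(
    sequence: Sequence[SeqItem], should_split: Callable[[SeqItem, SeqItem], bool]
) -> List[List[SeqItem]]:
    output: List[List[SeqItem]] = []
    if sequence:
        output.append([sequence[0]])
    for prev, cur in pairwise(sequence):
        if should_split(prev, cur):
            output.append([cur])  # start a new sub-sequence at the split point
        else:
            output[-1].append(cur)
    assert sum(len(s) for s in output) == len(sequence)
    return output


# Split whenever the gap between neighbouring values exceeds 2.
assert split_sequence_by([1, 2, 3, 10, 11, 20], lambda a, b: b - a > 2) == [
    [1, 2, 3],
    [10, 11],
    [20],
]
```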
@@ -109,7 +107,7 @@ def duplication_check(
         dup = types.describe_error_metadata(
             exceptions.MapillaryDuplicationError(
                 msg,
-
+                DescriptionJSONSerializer.as_desc(cur),
                 distance=distance,
                 angle_diff=angle_diff,
             ),
mapillary_tools/sample_video.py CHANGED
@@ -13,6 +13,7 @@ from . import constants, exceptions, ffmpeg as ffmpeglib, geo, types, utils
 from .exif_write import ExifEdit
 from .geotag import geotag_videos_from_video
 from .mp4 import mp4_sample_parser
+from .serializer.description import parse_capture_time

 LOG = logging.getLogger(__name__)

@@ -65,7 +66,7 @@ def sample_video(
     video_start_time_dt: datetime.datetime | None = None
     if video_start_time is not None:
         try:
-            video_start_time_dt =
+            video_start_time_dt = parse_capture_time(video_start_time)
         except ValueError as ex:
             raise exceptions.MapillaryBadParameterError(str(ex))

@@ -193,8 +194,8 @@ def _sample_single_video_by_interval(
     )

     with wip_dir_context(wip_sample_dir(sample_dir), sample_dir) as wip_dir:
-        ffmpeg.
-        frame_samples = ffmpeglib.sort_selected_samples(wip_dir, video_path
+        ffmpeg.extract_frames_by_interval(video_path, wip_dir, sample_interval)
+        frame_samples = ffmpeglib.FFMPEG.sort_selected_samples(wip_dir, video_path)
         for frame_idx_1based, sample_paths in frame_samples:
             assert len(sample_paths) == 1
             if sample_paths[0] is None:
@@ -321,11 +322,11 @@ def _sample_single_video_by_distance(
             video_path,
             wip_dir,
             frame_indices=set(sorted_sample_indices),
-
+            stream_specifier=str(video_stream_idx),
         )

-        frame_samples = ffmpeglib.sort_selected_samples(
-            wip_dir, video_path, [video_stream_idx]
+        frame_samples = ffmpeglib.FFMPEG.sort_selected_samples(
+            wip_dir, video_path, selected_stream_specifiers=[str(video_stream_idx)]
         )
         if len(frame_samples) != len(sorted_sample_indices):
             raise exceptions.MapillaryVideoError(