mapillary-tools 0.14.0a2__py3-none-any.whl → 0.14.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapillary_tools/__init__.py +1 -1
- mapillary_tools/api_v4.py +1 -0
- mapillary_tools/authenticate.py +9 -9
- mapillary_tools/blackvue_parser.py +79 -22
- mapillary_tools/config.py +38 -17
- mapillary_tools/constants.py +2 -0
- mapillary_tools/exiftool_read_video.py +52 -15
- mapillary_tools/exiftool_runner.py +4 -24
- mapillary_tools/ffmpeg.py +406 -232
- mapillary_tools/geotag/__init__.py +0 -0
- mapillary_tools/geotag/base.py +2 -2
- mapillary_tools/geotag/factory.py +97 -88
- mapillary_tools/geotag/geotag_images_from_exiftool.py +26 -19
- mapillary_tools/geotag/geotag_images_from_gpx.py +13 -6
- mapillary_tools/geotag/geotag_images_from_video.py +35 -0
- mapillary_tools/geotag/geotag_videos_from_exiftool.py +39 -13
- mapillary_tools/geotag/geotag_videos_from_gpx.py +22 -9
- mapillary_tools/geotag/options.py +25 -3
- mapillary_tools/geotag/video_extractors/base.py +1 -1
- mapillary_tools/geotag/video_extractors/exiftool.py +1 -1
- mapillary_tools/geotag/video_extractors/gpx.py +60 -70
- mapillary_tools/geotag/video_extractors/native.py +9 -31
- mapillary_tools/history.py +4 -1
- mapillary_tools/process_geotag_properties.py +16 -8
- mapillary_tools/process_sequence_properties.py +9 -11
- mapillary_tools/sample_video.py +7 -6
- mapillary_tools/serializer/description.py +587 -0
- mapillary_tools/serializer/gpx.py +132 -0
- mapillary_tools/types.py +44 -610
- mapillary_tools/upload.py +176 -197
- mapillary_tools/upload_api_v4.py +94 -51
- mapillary_tools/uploader.py +284 -138
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/METADATA +87 -31
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/RECORD +38 -35
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/WHEEL +1 -1
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/entry_points.txt +0 -0
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/licenses/LICENSE +0 -0
- {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/top_level.txt +0 -0
mapillary_tools/__init__.py
CHANGED
@@ -1 +1 @@
-VERSION = "0.14.0a2"
+VERSION = "0.14.0b1"
mapillary_tools/api_v4.py
CHANGED
mapillary_tools/authenticate.py
CHANGED
@@ -11,7 +11,7 @@ import jsonschema
 
 import requests
 
-from . import api_v4, config, constants, exceptions, types
+from . import api_v4, config, constants, exceptions
 
 
 LOG = logging.getLogger(__name__)
@@ -64,7 +64,7 @@ def authenticate(
         LOG.info('Creating new profile: "%s"', profile_name)
 
     if jwt:
-        user_items: types.UserItem = {"user_upload_token": jwt}
+        user_items: config.UserItem = {"user_upload_token": jwt}
         user_items = _verify_user_auth(_validate_profile(user_items))
     else:
         user_items = _prompt_login(
@@ -89,7 +89,7 @@ def authenticate(
 def fetch_user_items(
     user_name: str | None = None,
     organization_key: str | None = None,
-) -> types.UserItem:
+) -> config.UserItem:
     """
     Read user information from the config file,
     or prompt the user to authenticate if the specified profile does not exist
@@ -155,9 +155,9 @@ def _prompt(message: str) -> str:
     return input()
 
 
-def _validate_profile(user_items: types.UserItem) -> types.UserItem:
+def _validate_profile(user_items: config.UserItem) -> config.UserItem:
     try:
-        jsonschema.validate(user_items, types.UserItemSchema)
+        jsonschema.validate(user_items, config.UserItemSchema)
     except jsonschema.ValidationError as ex:
         raise exceptions.MapillaryBadParameterError(
             f"Invalid profile format: {ex.message}"
@@ -165,7 +165,7 @@ def _validate_profile(user_items: types.UserItem) -> types.UserItem:
     return user_items
 
 
-def _verify_user_auth(user_items: types.UserItem) -> types.UserItem:
+def _verify_user_auth(user_items: config.UserItem) -> config.UserItem:
     """
     Verify that the user access token is valid
     """
@@ -205,7 +205,7 @@ def _validate_profile_name(profile_name: str):
     )
 
 
-def _list_all_profiles(profiles: dict[str, types.UserItem]) -> None:
+def _list_all_profiles(profiles: dict[str, config.UserItem]) -> None:
     _echo("Existing Mapillary profiles:")
 
     # Header
@@ -256,7 +256,7 @@ def _is_login_retryable(ex: requests.HTTPError) -> bool:
 def _prompt_login(
     user_email: str | None = None,
     user_password: str | None = None,
-) -> types.UserItem:
+) -> config.UserItem:
     _enabled = _prompt_enabled()
 
     if user_email is None:
@@ -288,7 +288,7 @@ def _prompt_login(
 
     data = resp.json()
 
-    user_items: types.UserItem = {
+    user_items: config.UserItem = {
         "user_upload_token": str(data["access_token"]),
         "MAPSettingsUserKey": str(data["user_id"]),
     }
mapillary_tools/blackvue_parser.py
CHANGED
@@ -14,15 +14,14 @@ from .mp4 import simple_mp4_parser as sparser
 
 
 LOG = logging.getLogger(__name__)
-# An example: [1623057074211]$GPVTG,,T,,M,0.078,N,0.144,K,D*28[1623057075215]
 NMEA_LINE_REGEX = re.compile(
     rb"""
     ^\s*
-    \[(\d+)\] #
+    \[(\d+)\] # Timestamp
     \s*
-    (\$\w{5}.*) #
+    (\$\w{5}.*) # NMEA message
     \s*
-    (\[\d+\])? #
+    (\[\d+\])? # Strange timestamp
     \s*$
     """,
     re.X,
@@ -47,7 +46,7 @@ def extract_blackvue_info(fp: T.BinaryIO) -> BlackVueInfo | None:
     if gps_data is None:
         return None
 
-    points = list(_parse_gps_box(gps_data))
+    points = _parse_gps_box(gps_data)
     points.sort(key=lambda p: p.time)
 
     if points:
@@ -83,8 +82,12 @@ def extract_camera_model(fp: T.BinaryIO) -> str:
 
 
 def _extract_camera_model_from_cprt(cprt_bytes: bytes) -> str:
-
-
+    """
+    >>> _extract_camera_model_from_cprt(b' {"model":"DR900X Plus","ver":0.918,"lang":"English","direct":1,"psn":"","temp":34,"GPS":1}')
+    'DR900X Plus'
+    >>> _extract_camera_model_from_cprt(b' Pittasoft Co., Ltd.;DR900S-1CH;1.008;English;1;D90SS1HAE00661;T69;')
+    'DR900S-1CH'
+    """
     cprt_bytes = cprt_bytes.strip().strip(b"\x00")
 
     try:
@@ -111,28 +114,82 @@ def _extract_camera_model_from_cprt(cprt_bytes: bytes) -> str:
     return ""
 
 
-def _parse_gps_box(gps_data: bytes) ->
+def _parse_gps_box(gps_data: bytes) -> list[geo.Point]:
+    """
+    >>> list(_parse_gps_box(b"[1623057074211]$GPGGA,202530.00,5109.0262,N,11401.8407,W,5,40,0.5,1097.36,M,-17.00,M,18,TSTR*61"))
+    [Point(time=1623057074211, lat=51.150436666666664, lon=-114.03067833333333, alt=1097.36, angle=None)]
+
+    >>> list(_parse_gps_box(b"[1629874404069]$GNGGA,175322.00,3244.53126,N,11710.97811,W,1,12,0.84,17.4,M,-34.0,M,,*45"))
+    [Point(time=1629874404069, lat=32.742187666666666, lon=-117.1829685, alt=17.4, angle=None)]
+
+    >>> list(_parse_gps_box(b"[1629874404069]$GNGLL,4404.14012,N,12118.85993,W,001037.00,A,A*67"))
+    [Point(time=1629874404069, lat=44.069002, lon=-121.31433216666667, alt=None, angle=None)]
+
+    >>> list(_parse_gps_box(b"[1629874404069]$GNRMC,001031.00,A,4404.13993,N,12118.86023,W,0.146,,100117,,,A*7B"))
+    [Point(time=1629874404069, lat=44.06899883333333, lon=-121.31433716666666, alt=None, angle=None)]
+
+    >>> list(_parse_gps_box(b"[1623057074211]$GPVTG,,T,,M,0.078,N,0.144,K,D*28[1623057075215]"))
+    []
+    """
+    points_by_sentence_type: dict[str, list[geo.Point]] = {}
+
     for line_bytes in gps_data.splitlines():
         match = NMEA_LINE_REGEX.match(line_bytes)
         if match is None:
             continue
         nmea_line_bytes = match.group(2)
-
-
-
-
-
-
-
-
+
+        if not nmea_line_bytes:
+            continue
+
+        try:
+            nmea_line = nmea_line_bytes.decode("utf8")
+        except UnicodeDecodeError:
+            continue
+
+        if not nmea_line:
+            continue
+
+        try:
+            message = pynmea2.parse(nmea_line)
+        except pynmea2.nmea.ParseError:
+            continue
+
+        epoch_ms = int(match.group(1))
+
+        # https://tavotech.com/gps-nmea-sentence-structure/
+        if message.sentence_type in ["GGA"]:
+            if not message.is_valid:
                 continue
-
+            point = geo.Point(
+                time=epoch_ms,
+                lat=message.latitude,
+                lon=message.longitude,
+                alt=message.altitude,
+                angle=None,
+            )
+            points_by_sentence_type.setdefault(message.sentence_type, []).append(point)
+
+        elif message.sentence_type in ["RMC", "GLL"]:
+            if not message.is_valid:
                 continue
-
-            yield geo.Point(
+            point = geo.Point(
                 time=epoch_ms,
-                lat=
-                lon=
-                alt=
+                lat=message.latitude,
+                lon=message.longitude,
+                alt=None,
                 angle=None,
             )
+            points_by_sentence_type.setdefault(message.sentence_type, []).append(point)
+
+    # This is the extraction order in exiftool
+    if "RMC" in points_by_sentence_type:
+        return points_by_sentence_type["RMC"]
+
+    if "GGA" in points_by_sentence_type:
+        return points_by_sentence_type["GGA"]
+
+    if "GLL" in points_by_sentence_type:
+        return points_by_sentence_type["GLL"]
+
+    return []
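The rewritten _parse_gps_box collects fixes per NMEA sentence type and then returns a single type in a fixed preference order (RMC, then GGA, then GLL), matching exiftool's extraction order. The sketch below illustrates only that selection idea using pynmea2 directly; the function name and priority list are illustrative, not part of mapillary_tools:

# Illustrative sketch (not mapillary_tools code): group NMEA fixes by
# sentence type, then return the highest-priority group, mirroring the
# RMC > GGA > GLL preference used by the new _parse_gps_box.
import pynmea2

SENTENCE_PRIORITY = ["RMC", "GGA", "GLL"]

def pick_track(nmea_lines):
    by_type = {}
    for line in nmea_lines:
        try:
            msg = pynmea2.parse(line)
        except pynmea2.nmea.ParseError:
            continue
        stype = getattr(msg, "sentence_type", None)
        if stype in SENTENCE_PRIORITY and msg.is_valid:
            by_type.setdefault(stype, []).append(msg)
    for stype in SENTENCE_PRIORITY:
        if stype in by_type:
            return by_type[stype]
    return []

# One valid RMC sentence (taken from the doctest above) yields one fix
fixes = pick_track(["$GNRMC,001031.00,A,4404.13993,N,12118.86023,W,0.146,,100117,,,A*7B"])
print(len(fixes), round(fixes[0].latitude, 5))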
mapillary_tools/config.py
CHANGED
@@ -2,31 +2,54 @@ from __future__ import annotations
 
 import configparser
 import os
+import sys
 import typing as T
+from typing import TypedDict
 
-
+if sys.version_info >= (3, 11):
+    from typing import Required
+else:
+    from typing_extensions import Required
 
+from . import api_v4
 
-_CLIENT_ID = api_v4.MAPILLARY_CLIENT_TOKEN
-# Windows is not happy with | so we convert MLY|ID|TOKEN to MLY_ID_TOKEN
-_CLIENT_ID = _CLIENT_ID.replace("|", "_", 2)
-
-DEFAULT_MAPILLARY_FOLDER = os.path.join(
-    os.path.expanduser("~"),
-    ".config",
-    "mapillary",
-)
 
+DEFAULT_MAPILLARY_FOLDER = os.path.join(os.path.expanduser("~"), ".config", "mapillary")
 MAPILLARY_CONFIG_PATH = os.getenv(
     "MAPILLARY_CONFIG_PATH",
     os.path.join(
         DEFAULT_MAPILLARY_FOLDER,
         "configs",
-
+        # Windows is not happy with | so we convert MLY|ID|TOKEN to MLY_ID_TOKEN
+        api_v4.MAPILLARY_CLIENT_TOKEN.replace("|", "_"),
     ),
 )
 
 
+class UserItem(TypedDict, total=False):
+    MAPOrganizationKey: int | str
+    # Username
+    MAPSettingsUsername: str
+    # User ID
+    MAPSettingsUserKey: str
+    # User access token
+    user_upload_token: Required[str]
+
+
+UserItemSchema = {
+    "type": "object",
+    "properties": {
+        "MAPOrganizationKey": {"type": ["integer", "string"]},
+        # Not in use. Keep here for back-compatibility
+        "MAPSettingsUsername": {"type": "string"},
+        "MAPSettingsUserKey": {"type": "string"},
+        "user_upload_token": {"type": "string"},
+    },
+    "required": ["user_upload_token"],
+    "additionalProperties": True,
+}
+
+
 def _load_config(config_path: str) -> configparser.ConfigParser:
     config = configparser.ConfigParser()
     # Override to not change option names (by default it will lower them)
@@ -36,19 +59,17 @@ def _load_config(config_path: str) -> configparser.ConfigParser:
     return config
 
 
-def load_user(
-    profile_name: str, config_path: str | None = None
-) -> types.UserItem | None:
+def load_user(profile_name: str, config_path: str | None = None) -> UserItem | None:
     if config_path is None:
         config_path = MAPILLARY_CONFIG_PATH
     config = _load_config(config_path)
     if not config.has_section(profile_name):
         return None
     user_items = dict(config.items(profile_name))
-    return T.cast(types.UserItem, user_items)
+    return T.cast(UserItem, user_items)
 
 
-def list_all_users(config_path: str | None = None) -> dict[str, types.UserItem]:
+def list_all_users(config_path: str | None = None) -> dict[str, UserItem]:
     if config_path is None:
         config_path = MAPILLARY_CONFIG_PATH
     cp = _load_config(config_path)
@@ -60,7 +81,7 @@ def list_all_users(config_path: str | None = None) -> dict[str, types.UserItem]:
 
 
 def update_config(
-    profile_name: str, user_items: types.UserItem, config_path: str | None = None
+    profile_name: str, user_items: UserItem, config_path: str | None = None
 ) -> None:
     if config_path is None:
         config_path = MAPILLARY_CONFIG_PATH
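UserItem (a TypedDict with only user_upload_token required) and UserItemSchema now live in config, so a profile can be both type-annotated and schema-validated from one module. A minimal sketch of that usage, mirroring what authenticate._validate_profile does in this diff; the token value is a placeholder, not a real credential:

# Sketch: validate a profile dict against the relocated schema.
import jsonschema

from mapillary_tools import config

profile: config.UserItem = {
    "user_upload_token": "EXAMPLE_TOKEN",  # required; placeholder value
    "MAPSettingsUserKey": "123",           # optional user ID
}

# Raises jsonschema.ValidationError if e.g. user_upload_token is missing
jsonschema.validate(profile, config.UserItemSchema)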
mapillary_tools/constants.py
CHANGED
mapillary_tools/exiftool_read_video.py
CHANGED
@@ -82,6 +82,32 @@ def _extract_alternative_fields(
     return None
 
 
+def _same_gps_point(left: GPSPoint, right: GPSPoint) -> bool:
+    """
+    >>> left = GPSPoint(time=56.0, lat=36.741385, lon=29.021274, alt=141.6, angle=1.54, epoch_time=None, fix=None, precision=None, ground_speed=None)
+    >>> right = GPSPoint(time=56.0, lat=36.741385, lon=29.021274, alt=142.4, angle=1.54, epoch_time=None, fix=None, precision=None, ground_speed=None)
+    >>> _same_gps_point(left, right)
+    True
+    """
+    return (
+        left.time == right.time
+        and left.lon == right.lon
+        and left.lat == right.lat
+        and left.epoch_time == right.epoch_time
+        and left.angle == right.angle
+    )
+
+
+def _deduplicate_gps_points(
+    track: list[GPSPoint], same_gps_point: T.Callable[[GPSPoint, GPSPoint], bool]
+) -> list[GPSPoint]:
+    deduplicated_track: list[GPSPoint] = []
+    for point in track:
+        if not deduplicated_track or not same_gps_point(deduplicated_track[-1], point):
+            deduplicated_track.append(point)
+    return deduplicated_track
+
+
 def _aggregate_gps_track(
     texts_by_tag: dict[str, list[str]],
     time_tag: str | None,
@@ -174,7 +200,7 @@ def _aggregate_gps_track(
             epoch_time = geo.as_unix_time(dt)
 
     # build track
-    track = []
+    track: list[GPSPoint] = []
     for timestamp, lon, lat, alt, direction, ground_speed in zip(
         timestamps,
         lons,
@@ -185,22 +211,26 @@ def _aggregate_gps_track(
     ):
         if timestamp is None or lon is None or lat is None:
             continue
-
-
-
-
-
-
-
-
-
-
-
-        )
+
+        point = GPSPoint(
+            time=timestamp,
+            lon=lon,
+            lat=lat,
+            alt=alt,
+            angle=direction,
+            epoch_time=epoch_time,
+            fix=None,
+            precision=None,
+            ground_speed=ground_speed,
         )
 
+        if not track or not _same_gps_point(track[-1], point):
+            track.append(point)
+
     track.sort(key=lambda point: point.time)
 
+    track = _deduplicate_gps_points(track, same_gps_point=_same_gps_point)
+
     if time_tag is not None:
         if track:
             first_time = track[0].time
@@ -310,7 +340,10 @@ class ExifToolReadVideo:
         etree: ET.ElementTree,
     ) -> None:
         self.etree = etree
-
+        root = self.etree.getroot()
+        if root is None:
+            raise ValueError("ElementTree root is None")
+        self._texts_by_tag = _index_text_by_tag(root)
         self._all_tags = set(self._texts_by_tag.keys())
 
     def extract_gps_track(self) -> list[geo.Point]:
@@ -371,6 +404,10 @@ class ExifToolReadVideo:
         return model
 
     def _extract_gps_track_from_track(self) -> list[GPSPoint]:
+        root = self.etree.getroot()
+        if root is None:
+            raise ValueError("ElementTree root is None")
+
         for track_id in range(1, MAX_TRACK_ID + 1):
             track_ns = f"Track{track_id}"
             if self._all_tags_exists(
@@ -382,7 +419,7 @@ class ExifToolReadVideo:
                 }
             ):
                 sample_iterator = _aggregate_samples(
-
+                    root,
                     f"{track_ns}:SampleTime",
                     f"{track_ns}:SampleDuration",
                 )
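The new _deduplicate_gps_points pass drops consecutive points that _same_gps_point treats as equal (altitude differences are deliberately ignored, as its doctest shows). Below is a standalone sketch of the same filter with a simplified point type; SimplePoint is illustrative, the real code operates on GPSPoint:

# Standalone sketch of the consecutive-duplicate filter added in this diff.
from dataclasses import dataclass
from typing import Callable

@dataclass
class SimplePoint:
    time: float
    lat: float
    lon: float

def deduplicate(
    track: list[SimplePoint], same: Callable[[SimplePoint, SimplePoint], bool]
) -> list[SimplePoint]:
    out: list[SimplePoint] = []
    for point in track:
        # Keep a point only if it differs from the previously kept one
        if not out or not same(out[-1], point):
            out.append(point)
    return out

points = [SimplePoint(1, 10.0, 20.0), SimplePoint(1, 10.0, 20.0), SimplePoint(2, 10.1, 20.1)]
same = lambda a, b: (a.time, a.lat, a.lon) == (b.time, b.lat, b.lon)
assert [p.time for p in deduplicate(points, same)] == [1, 2]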
mapillary_tools/exiftool_runner.py
CHANGED
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import platform
-import shutil
 import subprocess
 import typing as T
 from pathlib import Path
@@ -12,32 +10,14 @@ class ExiftoolRunner:
     Wrapper around ExifTool to run it in a subprocess
     """
 
-    def __init__(self,
-
-        exiftool_path = self._search_preferred_exiftool_path()
-        self.exiftool_path = exiftool_path
+    def __init__(self, exiftool_executable: str = "exiftool", recursive: bool = False):
+        self.exiftool_executable = exiftool_executable
         self.recursive = recursive
 
-    def _search_preferred_exiftool_path(self) -> str:
-        system = platform.system()
-
-        if system and system.lower() == "windows":
-            exiftool_paths = ["exiftool.exe", "exiftool"]
-        else:
-            exiftool_paths = ["exiftool", "exiftool.exe"]
-
-        for path in exiftool_paths:
-            full_path = shutil.which(path)
-            if full_path:
-                return path
-
-        # Always return the prefered one, even if it is not found,
-        # and let the subprocess.run figure out the error later
-        return exiftool_paths[0]
-
     def _build_args_read_stdin(self) -> list[str]:
         args: list[str] = [
-            self.exiftool_path,
+            self.exiftool_executable,
+            "-fast",
             "-q",
             "-n", # Disable print conversion
             "-X", # XML output