mapillary-tools 0.13.1a1__py3-none-any.whl → 0.13.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapillary_tools/__init__.py +1 -1
- mapillary_tools/api_v4.py +133 -11
- mapillary_tools/commands/__main__.py +9 -4
- mapillary_tools/commands/process_and_upload.py +1 -0
- mapillary_tools/constants.py +1 -1
- mapillary_tools/geotag/geotag_images_from_gpx_file.py +7 -1
- mapillary_tools/upload.py +11 -43
- mapillary_tools/upload_api_v4.py +72 -92
- mapillary_tools/uploader.py +0 -2
- mapillary_tools/video_data_extraction/extractors/camm_parser.py +8 -12
- mapillary_tools/video_data_extraction/extractors/gpx_parser.py +60 -23
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/METADATA +1 -1
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/RECORD +17 -17
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/WHEEL +1 -1
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/LICENSE +0 -0
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/entry_points.txt +0 -0
- {mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/top_level.txt +0 -0
mapillary_tools/__init__.py
CHANGED

@@ -1 +1 @@
-VERSION = "0.13.1a1"
+VERSION = "0.13.3"
mapillary_tools/api_v4.py
CHANGED

@@ -2,6 +2,7 @@ import logging
 import os
 import ssl
 import typing as T
+from json import dumps

 import requests
 from requests.adapters import HTTPAdapter
@@ -46,6 +47,106 @@ class HTTPSystemCertsAdapter(HTTPAdapter):
         conn.ca_certs = None


+@T.overload
+def _truncate(s: bytes, limit: int = 512) -> bytes: ...
+
+
+@T.overload
+def _truncate(s: str, limit: int = 512) -> str: ...
+
+
+def _truncate(s, limit=512):
+    if limit < len(s):
+        remaining = len(s) - limit
+        if isinstance(s, bytes):
+            return (
+                s[:limit]
+                + b"..."
+                + f"({remaining} more bytes truncated)".encode("utf-8")
+            )
+        else:
+            return str(s[:limit]) + f"...({remaining} more chars truncated)"
+    else:
+        return s
+
+
+def _sanitize(headers: T.Dict):
+    new_headers = {}
+
+    for k, v in headers.items():
+        if k.lower() in [
+            "authorization",
+            "cookie",
+            "x-fb-access-token",
+            "access-token",
+            "access_token",
+            "password",
+        ]:
+            new_headers[k] = "[REDACTED]"
+        else:
+            new_headers[k] = _truncate(v)
+
+    return new_headers
+
+
+def _log_debug_request(
+    method: str,
+    url: str,
+    json: T.Optional[T.Dict] = None,
+    params: T.Optional[T.Dict] = None,
+    headers: T.Optional[T.Dict] = None,
+    timeout: T.Any = None,
+):
+    if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+        return
+
+    msg = f"HTTP {method} {url}"
+
+    if USE_SYSTEM_CERTS:
+        msg += " (w/sys_certs)"
+
+    if json:
+        t = _truncate(dumps(_sanitize(json)))
+        msg += f" JSON={t}"
+
+    if params:
+        msg += f" PARAMS={_sanitize(params)}"
+
+    if headers:
+        msg += f" HEADERS={_sanitize(headers)}"
+
+    if timeout is not None:
+        msg += f" TIMEOUT={timeout}"
+
+    LOG.debug(msg)
+
+
+def _log_debug_response(resp: requests.Response):
+    if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+        return
+
+    data: T.Union[str, bytes]
+    try:
+        data = _truncate(dumps(_sanitize(resp.json())))
+    except Exception:
+        data = _truncate(resp.content)
+
+    LOG.debug(f"HTTP {resp.status_code} ({resp.reason}): %s", data)
+
+
+def readable_http_error(ex: requests.HTTPError) -> str:
+    req = ex.request
+    resp = ex.response
+
+    data: T.Union[str, bytes]
+    try:
+        data = _truncate(dumps(_sanitize(resp.json())))
+    except Exception:
+        data = _truncate(resp.content)
+
+    return f"{req.method} {resp.url} => {resp.status_code} ({resp.reason}): {str(data)}"
+
+
 def request_post(
     url: str,
     data: T.Optional[T.Any] = None,
@@ -54,14 +155,23 @@ def request_post(
 ) -> requests.Response:
     global USE_SYSTEM_CERTS

+    _log_debug_request(
+        "POST",
+        url,
+        json=json,
+        params=kwargs.get("params"),
+        headers=kwargs.get("headers"),
+        timeout=kwargs.get("timeout"),
+    )
+
     if USE_SYSTEM_CERTS:
         with requests.Session() as session:
             session.mount("https://", HTTPSystemCertsAdapter())
-            return session.post(url, data=data, json=json, **kwargs)
+            resp = session.post(url, data=data, json=json, **kwargs)

     else:
         try:
-            return requests.post(url, data=data, json=json, **kwargs)
+            resp = requests.post(url, data=data, json=json, **kwargs)
         except requests.exceptions.SSLError as ex:
             if "SSLCertVerificationError" not in str(ex):
                 raise ex
@@ -70,9 +180,11 @@ def request_post(
             LOG.warning(
                 "SSL error occurred, falling back to system SSL certificates: %s", ex
             )
-
-
-
+            return request_post(url, data=data, json=json, **kwargs)
+
+    _log_debug_response(resp)
+
+    return resp


 def request_get(
@@ -82,13 +194,21 @@ def request_get(
 ) -> requests.Response:
     global USE_SYSTEM_CERTS

+    _log_debug_request(
+        "GET",
+        url,
+        params=kwargs.get("params"),
+        headers=kwargs.get("headers"),
+        timeout=kwargs.get("timeout"),
+    )
+
     if USE_SYSTEM_CERTS:
         with requests.Session() as session:
             session.mount("https://", HTTPSystemCertsAdapter())
-            return session.get(url, params=params, **kwargs)
+            resp = session.get(url, params=params, **kwargs)
     else:
         try:
-            return requests.get(url, params=params, **kwargs)
+            resp = requests.get(url, params=params, **kwargs)
         except requests.exceptions.SSLError as ex:
             if "SSLCertVerificationError" not in str(ex):
                 raise ex
@@ -97,15 +217,17 @@ def request_get(
             LOG.warning(
                 "SSL error occurred, falling back to system SSL certificates: %s", ex
             )
-
-
-
+            resp = request_get(url, params=params, **kwargs)
+
+    _log_debug_response(resp)
+
+    return resp


 def get_upload_token(email: str, password: str) -> requests.Response:
     resp = request_post(
         f"{MAPILLARY_GRAPH_API_ENDPOINT}/login",
-
+        headers={"Authorization": f"OAuth {MAPILLARY_CLIENT_TOKEN}"},
         json={"email": email, "password": password, "locale": "en_US"},
         timeout=REQUESTS_TIMEOUT,
     )
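Taken together, `_truncate`, `_sanitize`, and the two `_log_debug_*` helpers keep request/response debug logging bounded in size and free of credentials. A minimal, self-contained sketch of the redaction behavior (string case only; it mirrors the helpers above rather than importing the package):

from json import dumps

# Keys treated as secrets, matching the list in _sanitize above
SENSITIVE_KEYS = {
    "authorization", "cookie", "x-fb-access-token",
    "access-token", "access_token", "password",
}

def truncate(s: str, limit: int = 512) -> str:
    # Cap long values so a single debug line stays readable
    if limit < len(s):
        remaining = len(s) - limit
        return s[:limit] + f"...({remaining} more chars truncated)"
    return s

def sanitize(headers: dict) -> dict:
    return {
        k: "[REDACTED]" if k.lower() in SENSITIVE_KEYS else truncate(str(v))
        for k, v in headers.items()
    }

print(dumps(sanitize({"Authorization": "OAuth abc123", "Content-Type": "application/json"})))
# {"Authorization": "[REDACTED]", "Content-Type": "application/json"}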
mapillary_tools/commands/__main__.py
CHANGED

@@ -5,7 +5,9 @@ import sys
 import typing as T
 from pathlib import Path

-from .. import constants, exceptions, VERSION
+import requests
+
+from .. import api_v4, constants, exceptions, VERSION
 from . import (
     authenticate,
     process,
@@ -160,11 +162,14 @@ def main():

     try:
         args.func(argvars)
-    except exceptions.MapillaryUserError as ex:
+    except requests.HTTPError as ex:
+        LOG.error("%s: %s", ex.__class__.__name__, api_v4.readable_http_error(ex))
+
+    except exceptions.MapillaryUserError as ex:
         LOG.error(
-            "%s: %s",
+            "%s: %s", ex.__class__.__name__, ex, exc_info=log_level == logging.DEBUG
         )
-        sys.exit(
+        sys.exit(ex.exit_code)


 if __name__ == "__main__":
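With `readable_http_error` available, the CLI entry point can collapse a failed API call into a single log line instead of a raw traceback. A rough sketch of the same pattern outside the package (the `describe` function and the example endpoint are simplified stand-ins, not the packaged code):

import logging
import sys

import requests

LOG = logging.getLogger(__name__)

def describe(ex: requests.HTTPError) -> str:
    # Simplified stand-in for api_v4.readable_http_error
    resp = ex.response
    return f"{resp.request.method} {resp.url} => {resp.status_code} ({resp.reason})"

def main() -> None:
    try:
        resp = requests.get("https://httpbin.org/status/404", timeout=10)
        resp.raise_for_status()
    except requests.HTTPError as ex:
        # One readable line, mirroring the new except branch above
        LOG.error("%s: %s", ex.__class__.__name__, describe(ex))
        sys.exit(1)

if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    main()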
mapillary_tools/constants.py
CHANGED

@@ -45,6 +45,6 @@ GOPRO_GPS_PRECISION = float(os.getenv(_ENV_PREFIX + "GOPRO_GPS_PRECISION", 15))
 # Max number of images per sequence
 MAX_SEQUENCE_LENGTH = int(os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_LENGTH", 1000))
 # Max file size per sequence (sum of image filesizes in the sequence)
-MAX_SEQUENCE_FILESIZE: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_FILESIZE", "
+MAX_SEQUENCE_FILESIZE: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_FILESIZE", "110G")
 # Max number of pixels per sequence (sum of image pixels in the sequence)
 MAX_SEQUENCE_PIXELS: str = os.getenv(_ENV_PREFIX + "MAX_SEQUENCE_PIXELS", "6G")
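Like the other limits in constants.py, the new default can be overridden through an environment variable. A sketch, assuming `_ENV_PREFIX` resolves to `MAPILLARY_TOOLS_` (the prefix itself is defined outside this excerpt):

import os

# Assumed variable name; the actual _ENV_PREFIX value is not shown in this diff
os.environ["MAPILLARY_TOOLS_MAX_SEQUENCE_FILESIZE"] = "50G"

from mapillary_tools import constants

print(constants.MAX_SEQUENCE_FILESIZE)  # "50G" if the prefix assumption holds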
mapillary_tools/geotag/geotag_images_from_gpx_file.py
CHANGED

@@ -25,7 +25,13 @@ class GeotagImagesFromGPXFile(GeotagImagesFromGeneric):
         num_processes: T.Optional[int] = None,
     ):
         super().__init__()
-        tracks = parse_gpx(source_path)
+        try:
+            tracks = parse_gpx(source_path)
+        except Exception as ex:
+            raise RuntimeError(
+                f"Error parsing GPX {source_path}: {ex.__class__.__name__}: {ex}"
+            )
+
         if 1 < len(tracks):
             LOG.warning(
                 "Found %s tracks in the GPX file %s. Will merge points in all the tracks as a single track for interpolation",
mapillary_tools/upload.py
CHANGED

@@ -47,25 +47,6 @@ class UploadError(Exception):
         super().__init__(str(inner_ex))


-class UploadHTTPError(Exception):
-    pass
-
-
-def wrap_http_exception(ex: requests.HTTPError):
-    req = ex.request
-    resp = ex.response
-    if isinstance(resp, requests.Response) and isinstance(req, requests.Request):
-        lines = [
-            f"{req.method} {resp.url}",
-            f"> HTTP Status: {resp.status_code}",
-            str(resp.content),
-        ]
-    else:
-        lines = []
-
-    return UploadHTTPError("\n".join(lines))
-
-
 def _load_validate_metadatas_from_desc_path(
     desc_path: T.Optional[str], import_paths: T.Sequence[Path]
 ) -> T.List[types.Metadata]:
@@ -175,18 +156,12 @@ def fetch_user_items(
             "Found multiple Mapillary accounts. Please specify one with --user_name"
         )
     else:
-        try:
-            user_items = authenticate.authenticate_user(user_name)
-        except requests.HTTPError as exc:
-            raise wrap_http_exception(exc) from exc
+        user_items = authenticate.authenticate_user(user_name)

     if organization_key is not None:
-        try:
-            resp = api_v4.fetch_organization(
-                user_items["user_upload_token"], organization_key
-            )
-        except requests.HTTPError as ex:
-            raise wrap_http_exception(ex) from ex
+        resp = api_v4.fetch_organization(
+            user_items["user_upload_token"], organization_key
+        )
         org = resp.json()
         LOG.info("Uploading to organization: %s", json.dumps(org))
         user_items = T.cast(
@@ -430,15 +405,12 @@ def _api_logging_finished(summary: T.Dict):
     action: api_v4.ActionType = "upload_finished_upload"
     LOG.debug("API Logging for action %s: %s", action, summary)
     try:
-        api_v4.log_event(
-            action,
-            summary,
-        )
+        api_v4.log_event(action, summary)
     except requests.HTTPError as exc:
         LOG.warning(
-            "
+            "HTTPError from API Logging for action %s: %s",
             action,
-
+            api_v4.readable_http_error(exc),
         )
     except Exception:
         LOG.warning("Error from API Logging for action %s", action, exc_info=True)
@@ -452,16 +424,12 @@ def _api_logging_failed(payload: T.Dict, exc: Exception):
     action: api_v4.ActionType = "upload_failed_upload"
     LOG.debug("API Logging for action %s: %s", action, payload)
     try:
-        api_v4.log_event(
-            action,
-            payload_with_reason,
-        )
+        api_v4.log_event(action, payload_with_reason)
     except requests.HTTPError as exc:
-        wrapped_exc = wrap_http_exception(exc)
         LOG.warning(
-            "
+            "HTTPError from API Logging for action %s: %s",
             action,
-            wrapped_exc,
+            api_v4.readable_http_error(exc),
         )
     except Exception:
         LOG.warning("Error from API Logging for action %s", action, exc_info=True)
@@ -678,7 +646,7 @@ def upload(
             raise exceptions.MapillaryUploadUnauthorizedError(
                 debug_info.get("message")
             ) from inner_ex
-        raise
+        raise inner_ex

     raise inner_ex
mapillary_tools/upload_api_v4.py
CHANGED

@@ -1,16 +1,19 @@
 import enum
 import io
-import json
-import logging
 import os
 import random
 import typing as T
+import uuid

 import requests

-from .api_v4 import MAPILLARY_GRAPH_API_ENDPOINT, request_get, request_post
+from .api_v4 import (
+    MAPILLARY_GRAPH_API_ENDPOINT,
+    request_get,
+    request_post,
+    REQUESTS_TIMEOUT,
+)

-LOG = logging.getLogger(__name__)
 MAPILLARY_UPLOAD_ENDPOINT = os.getenv(
     "MAPILLARY_UPLOAD_ENDPOINT", "https://rupload.facebook.com/mapillary_public_uploads"
 )
@@ -21,7 +24,6 @@ DEFAULT_CHUNK_SIZE = 1024 * 1024 * 16 # 16MB
 # i.e. if your the server does not respond within this timeout, it will throw:
 # ConnectionError: ('Connection aborted.', timeout('The write operation timed out'))
 # So let us make sure the largest possible chunks can be uploaded before this timeout for now,
-REQUESTS_TIMEOUT = (20, 20)  # 20 seconds
 UPLOAD_REQUESTS_TIMEOUT = (30 * 60, 30 * 60)  # 30 minutes


@@ -31,55 +33,33 @@ class ClusterFileType(enum.Enum):
     CAMM = "mly_camm_video"


-def _sanitize_headers(headers: T.Dict):
-    return {
-        k: v
-        for k, v in headers.items()
-        if k.lower() not in ["authorization", "cookie", "x-fb-access-token"]
-    }
-
-
-_S = T.TypeVar("_S", str, bytes)
-
-
-def _truncate_end(s: _S) -> _S:
-    MAX_LENGTH = 512
-    if MAX_LENGTH < len(s):
-        if isinstance(s, bytes):
-            return s[:MAX_LENGTH] + b"..."
-        else:
-            return str(s[:MAX_LENGTH]) + "..."
-    else:
-        return s
-
-
 class UploadService:
     user_access_token: str
-    entity_size: int
     session_key: str
     callbacks: T.List[T.Callable[[bytes, T.Optional[requests.Response]], None]]
     cluster_filetype: ClusterFileType
     organization_id: T.Optional[T.Union[str, int]]
     chunk_size: int

+    MIME_BY_CLUSTER_TYPE: T.Dict[ClusterFileType, str] = {
+        ClusterFileType.ZIP: "application/zip",
+        ClusterFileType.BLACKVUE: "video/mp4",
+        ClusterFileType.CAMM: "video/mp4",
+    }
+
     def __init__(
         self,
         user_access_token: str,
         session_key: str,
-        entity_size: int,
         organization_id: T.Optional[T.Union[str, int]] = None,
        cluster_filetype: ClusterFileType = ClusterFileType.ZIP,
         chunk_size: int = DEFAULT_CHUNK_SIZE,
     ):
-        if entity_size <= 0:
-            raise ValueError(f"Expect positive entity size but got {entity_size}")
-
         if chunk_size <= 0:
             raise ValueError("Expect positive chunk size")

         self.user_access_token = user_access_token
         self.session_key = session_key
-        self.entity_size = entity_size
         self.organization_id = organization_id
         # validate the input
         self.cluster_filetype = ClusterFileType(cluster_filetype)
@@ -91,13 +71,11 @@ class UploadService:
             "Authorization": f"OAuth {self.user_access_token}",
         }
         url = f"{MAPILLARY_UPLOAD_ENDPOINT}/{self.session_key}"
-        LOG.debug("GET %s", url)
         resp = request_get(
             url,
             headers=headers,
             timeout=REQUESTS_TIMEOUT,
         )
-        LOG.debug("HTTP response %s: %s", resp.status_code, resp.content)
         resp.raise_for_status()
         data = resp.json()
         return data["offset"]
@@ -107,54 +85,63 @@ class UploadService:
         data: T.IO[bytes],
         offset: T.Optional[int] = None,
     ) -> str:
-
-
-
-        entity_type_map: T.Dict[ClusterFileType, str] = {
-            ClusterFileType.ZIP: "application/zip",
-            ClusterFileType.BLACKVUE: "video/mp4",
-            ClusterFileType.CAMM: "video/mp4",
-        }
-
-        entity_type = entity_type_map[self.cluster_filetype]
-
-        data.seek(offset, io.SEEK_CUR)
+        chunks = self._chunkize_byte_stream(data)
+        return self.upload_chunks(chunks, offset=offset)

+    def _chunkize_byte_stream(
+        self, stream: T.IO[bytes]
+    ) -> T.Generator[bytes, None, None]:
         while True:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            data = stream.read(self.chunk_size)
+            if not data:
+                break
+            yield data
+
+    def _offset_chunks(
+        self, chunks: T.Iterable[bytes], offset: int
+    ) -> T.Generator[bytes, None, None]:
+        assert offset >= 0, f"Expect non-negative offset but got {offset}"
+
+        for chunk in chunks:
+            if offset:
+                if offset < len(chunk):
+                    yield chunk[offset:]
+                    offset = 0
+                else:
+                    offset -= len(chunk)
+            else:
+                yield chunk
+
+    def _attach_callbacks(
+        self, chunks: T.Iterable[bytes]
+    ) -> T.Generator[bytes, None, None]:
+        for chunk in chunks:
+            yield chunk
             for callback in self.callbacks:
-                callback(chunk,
-                # we can assert that offset == self.fetch_offset(session_key)
-                # otherwise, server will throw
+                callback(chunk, None)

-
-
+    def upload_chunks(
+        self,
+        chunks: T.Iterable[bytes],
+        offset: T.Optional[int] = None,
+    ) -> str:
+        if offset is None:
+            offset = self.fetch_offset()
+
+        chunks = self._attach_callbacks(self._offset_chunks(chunks, offset))

-
-        f"
+        headers = {
+            "Authorization": f"OAuth {self.user_access_token}",
+            "Offset": f"{offset}",
+            "X-Entity-Name": self.session_key,
+            "X-Entity-Type": self.MIME_BY_CLUSTER_TYPE[self.cluster_filetype],
+        }
+        url = f"{MAPILLARY_UPLOAD_ENDPOINT}/{self.session_key}"
+        resp = request_post(
+            url,
+            headers=headers,
+            data=chunks,
+            timeout=UPLOAD_REQUESTS_TIMEOUT,
         )

         payload = resp.json()
@@ -178,14 +165,12 @@ class UploadService:

         url = f"{MAPILLARY_GRAPH_API_ENDPOINT}/finish_upload"

-        LOG.debug("POST %s HEADERS %s", url, json.dumps(_sanitize_headers(headers)))
         resp = request_post(
             url,
             headers=headers,
             json=data,
             timeout=REQUESTS_TIMEOUT,
         )
-        LOG.debug("HTTP response %s: %s", resp.status_code, _truncate_end(resp.content))

         resp.raise_for_status()

@@ -209,35 +194,30 @@ class FakeUploadService(UploadService):
         )
         self._error_ratio = 0.1

-    def upload(
+    def upload_chunks(
         self,
-        data: T.IO[bytes],
+        chunks: T.Iterable[bytes],
         offset: T.Optional[int] = None,
     ) -> str:
         if offset is None:
             offset = self.fetch_offset()
+
+        chunks = self._attach_callbacks(self._offset_chunks(chunks, offset))
+
         os.makedirs(self._upload_path, exist_ok=True)
         filename = os.path.join(self._upload_path, self.session_key)
         with open(filename, "ab") as fp:
-
-            while True:
-                chunk = data.read(self.chunk_size)
-                if not chunk:
-                    break
-                # fail here means nothing uploaded
+            for chunk in chunks:
                 if random.random() <= self._error_ratio:
                     raise requests.ConnectionError(
                         f"TEST ONLY: Failed to upload with error ratio {self._error_ratio}"
                     )
                 fp.write(chunk)
-                # fail here means patially uploaded
                 if random.random() <= self._error_ratio:
                     raise requests.ConnectionError(
                         f"TEST ONLY: Partially uploaded with error ratio {self._error_ratio}"
                     )
-
-                for callback in self.callbacks:
-                    callback(chunk, None)
-        return self.session_key
+        return uuid.uuid4().hex

     def finish(self, _: str) -> str:
         return "0"
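The rewrite above replaces the old seek-and-loop upload with three composable generators: `_chunkize_byte_stream` slices the stream, `_offset_chunks` drops the bytes the server already has, and `_attach_callbacks` notifies observers after each chunk is consumed. A self-contained sketch of the resume logic (free functions rather than the class methods above):

import io
import typing as T

def chunkize(stream: T.IO[bytes], chunk_size: int) -> T.Generator[bytes, None, None]:
    # Yield fixed-size chunks until the stream is exhausted
    while True:
        data = stream.read(chunk_size)
        if not data:
            break
        yield data

def offset_chunks(chunks: T.Iterable[bytes], offset: int) -> T.Generator[bytes, None, None]:
    # Skip the first `offset` bytes: the portion the server already received
    assert offset >= 0
    for chunk in chunks:
        if offset:
            if offset < len(chunk):
                yield chunk[offset:]
                offset = 0
            else:
                offset -= len(chunk)
        else:
            yield chunk

stream = io.BytesIO(b"abcdefghij")
resumed = b"".join(offset_chunks(chunkize(stream, chunk_size=4), offset=6))
print(resumed)  # b'ghij' -- the upload resumes at byte 6

Because `requests` accepts an iterable as the `data=` argument, the composed generator can be handed straight to the POST call, which is what `upload_chunks` does with `timeout=UPLOAD_REQUESTS_TIMEOUT`.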
mapillary_tools/uploader.py
CHANGED

@@ -195,7 +195,6 @@ class Uploader:
             upload_api_v4.FakeUploadService(
                 user_access_token=self.user_items["user_upload_token"],
                 session_key=session_key,
-                entity_size=entity_size,
                 organization_id=self.user_items.get("MAPOrganizationKey"),
                 cluster_filetype=cluster_filetype,
                 chunk_size=self.chunk_size,
@@ -205,7 +204,6 @@ class Uploader:
         upload_service = upload_api_v4.UploadService(
             user_access_token=self.user_items["user_upload_token"],
             session_key=session_key,
-            entity_size=entity_size,
             organization_id=self.user_items.get("MAPOrganizationKey"),
             cluster_filetype=cluster_filetype,
             chunk_size=self.chunk_size,
mapillary_tools/video_data_extraction/extractors/camm_parser.py
CHANGED

@@ -13,8 +13,12 @@ class CammParser(BaseParser):
     parser_label = "camm"

     @functools.cached_property
-    def __camera_info(self) -> T.Tuple[str, str]:
-        with self.geotag_source_path.open("rb") as fp:
+    def _camera_info(self) -> T.Tuple[str, str]:
+        source_path = self.geotag_source_path
+        if not source_path:
+            return "", ""
+
+        with source_path.open("rb") as fp:
             return camm_parser.extract_camera_make_and_model(fp)

     def extract_points(self) -> T.Sequence[geo.Point]:
@@ -28,15 +32,7 @@ class CammParser(BaseParser):
         return []

     def extract_make(self) -> T.Optional[str]:
-        source_path = self.geotag_source_path
-        if not source_path:
-            return None
-        with source_path.open("rb") as _fp:
-            return self.__camera_info[0] or None
+        return self._camera_info[0] or None

     def extract_model(self) -> T.Optional[str]:
-        source_path = self.geotag_source_path
-        if not source_path:
-            return None
-        with source_path.open("rb") as _fp:
-            return self.__camera_info[1] or None
+        return self._camera_info[1] or None
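Folding the two open-the-file-again methods into one `functools.cached_property` means the video is parsed at most once, however many of make/model are requested. A toy illustration of that caching (hypothetical values, unrelated to actual CAMM parsing):

import functools
import typing as T

class CameraInfo:
    parses = 0  # counts how often the expensive parse runs

    @functools.cached_property
    def _camera_info(self) -> T.Tuple[str, str]:
        # Pretend this opened and parsed the video file
        CameraInfo.parses += 1
        return "SomeMake", "SomeModel"

    def extract_make(self) -> T.Optional[str]:
        return self._camera_info[0] or None

    def extract_model(self) -> T.Optional[str]:
        return self._camera_info[1] or None

info = CameraInfo()
print(info.extract_make(), info.extract_model(), CameraInfo.parses)
# SomeMake SomeModel 1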
mapillary_tools/video_data_extraction/extractors/gpx_parser.py
CHANGED

@@ -20,7 +20,13 @@ class GpxParser(BaseParser):
         if not path:
             return []

-        gpx_tracks = geotag_images_from_gpx_file.parse_gpx(path)
+        try:
+            gpx_tracks = geotag_images_from_gpx_file.parse_gpx(path)
+        except Exception as ex:
+            raise RuntimeError(
+                f"Error parsing GPX {path}: {ex.__class__.__name__}: {ex}"
+            )
+
         if 1 < len(gpx_tracks):
             LOG.warning(
                 "Found %s tracks in the GPX file %s. Will merge points in all the tracks as a single track for interpolation",
@@ -32,40 +38,71 @@ class GpxParser(BaseParser):
         if not gpx_points:
             return gpx_points

+        offset = self._synx_gpx_by_first_gps_timestamp(gpx_points)
+
+        self._rebase_times(gpx_points, offset=offset)
+
+        return gpx_points
+
+    def _synx_gpx_by_first_gps_timestamp(
+        self, gpx_points: T.Sequence[geo.Point]
+    ) -> float:
+        offset: float = 0.0
+
+        if not gpx_points:
+            return offset
+
         first_gpx_dt = datetime.datetime.fromtimestamp(
             gpx_points[0].time, tz=datetime.timezone.utc
         )
         LOG.info("First GPX timestamp: %s", first_gpx_dt)

         # Extract first GPS timestamp (if found) for synchronization
-
-        parser = GenericVideoParser(self.videoPath, self.options,
+        # Use an empty dictionary to force video parsers to extract make/model from the video metadata itself
+        parser = GenericVideoParser(self.videoPath, self.options, {})
         gps_points = parser.extract_points()
-        if gps_points:
-            first_gps_point = gps_points[0]
-            if isinstance(first_gps_point, telemetry.GPSPoint):
-                if first_gps_point.epoch_time is not None:
-                    first_gps_dt = datetime.datetime.fromtimestamp(
-                        first_gps_point.epoch_time, tz=datetime.timezone.utc
-                    )
-                    LOG.info("First GPS timestamp: %s", first_gps_dt)
-                    offset = gpx_points[0].time - first_gps_point.epoch_time
-                    if offset:
-                        LOG.warning(
-                            "Found offset between GPX %s and video GPS timestamps %s: %s seconds",
-                            first_gpx_dt,
-                            first_gps_dt,
-                            offset,
-                        )

-
+        if not gps_points:
+            LOG.warning(
+                "Skip GPX synchronization because no GPS found in video %s",
+                self.videoPath,
+            )
+            return offset

-
+        first_gps_point = gps_points[0]
+        if isinstance(first_gps_point, telemetry.GPSPoint):
+            if first_gps_point.epoch_time is not None:
+                first_gps_dt = datetime.datetime.fromtimestamp(
+                    first_gps_point.epoch_time, tz=datetime.timezone.utc
+                )
+                LOG.info("First GPS timestamp: %s", first_gps_dt)
+                offset = gpx_points[0].time - first_gps_point.epoch_time
+                if offset:
+                    LOG.warning(
+                        "Found offset between GPX %s and video GPS timestamps %s: %s seconds",
+                        first_gpx_dt,
+                        first_gps_dt,
+                        offset,
+                    )
+                else:
+                    LOG.info(
+                        "GPX and GPS are perfectly synchronized (all starts from %s)",
+                        first_gpx_dt,
+                    )
+            else:
+                LOG.warning(
+                    "Skip GPX synchronization because no GPS epoch time found in video %s",
+                    self.videoPath,
+                )
+
+        return offset

     def extract_make(self) -> T.Optional[str]:
-
+        # Use an empty dictionary to force video parsers to extract make/model from the video metadata itself
+        parser = GenericVideoParser(self.videoPath, self.options, {})
         return parser.extract_make()

     def extract_model(self) -> T.Optional[str]:
-
+        # Use an empty dictionary to force video parsers to extract make/model from the video metadata itself
+        parser = GenericVideoParser(self.videoPath, self.options, {})
         return parser.extract_model()
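`_synx_gpx_by_first_gps_timestamp` returns how many seconds the external GPX track leads (or lags) the GPS stream embedded in the video, and `_rebase_times` then shifts the GPX points by that amount. A numeric sketch of the offset computation (plain floats; the subtraction in the rebase step is an assumption, since `_rebase_times` is defined outside this diff):

# First point of the external GPX file, in epoch seconds
first_gpx_time = 1_700_000_010.0
# First GPS fix embedded in the video, in epoch seconds
first_gps_time = 1_700_000_000.0

# Same arithmetic as above: gpx_points[0].time - first_gps_point.epoch_time
offset = first_gpx_time - first_gps_time  # 10.0 -> GPX runs 10s ahead

# Assumed rebase: shift every GPX timestamp back by the offset
gpx_times = [first_gpx_time + i for i in range(3)]
rebased = [t - offset for t in gpx_times]

print(rebased[0] == first_gps_time)  # True: both tracks now start together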
{mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/RECORD
CHANGED

@@ -1,8 +1,8 @@
-mapillary_tools/__init__.py,sha256=
-mapillary_tools/api_v4.py,sha256=
+mapillary_tools/__init__.py,sha256=tOdTWrFVTLaGb-_qr6fgRv_nOA5NidkflFCzgUk9sdQ,19
+mapillary_tools/api_v4.py,sha256=bbu2VuzkUysLOBcjyGD_qQqnS1zsYgYDJAqBhQEVESw,7786
 mapillary_tools/authenticate.py,sha256=LCFcs6LqZmXaYkTUEKgGfmqytWdh5v_L3KXB48ojOZ4,3090
 mapillary_tools/config.py,sha256=jCjaK4jJaTY4AV4qf_b_tcxn5LA_uPsEWlGIdm2zw6g,2103
-mapillary_tools/constants.py,sha256=
+mapillary_tools/constants.py,sha256=KKQMwzpN2z7wka6lhZv-agdxNldaEQqvQXu3Kg5XuUk,2453
 mapillary_tools/exceptions.py,sha256=Mh1tgVEFTSMnYEzrl9x7b95fW9Z3SPVD_YMEl7r8I0I,2693
 mapillary_tools/exif_read.py,sha256=F60A0-T8XSBHvFKgVIrUz_ZWKQrTFWrtj3c6siB0IMg,28707
 mapillary_tools/exif_write.py,sha256=3PawLnBOY8Z86TYiA_F4LxRhe5Ui6CTNhxYm9yeJNX8,8786
@@ -17,17 +17,17 @@ mapillary_tools/process_sequence_properties.py,sha256=5oYEjz9crnLVQtCkxbwn57Tkeu
 mapillary_tools/sample_video.py,sha256=dpdX7bUNEmcrz-3gh3Y3awnTDX66pChbTKuF8qGfeCI,14400
 mapillary_tools/telemetry.py,sha256=WpBGPF_GMPjM_EFqXIutFtpDFL9wj7yEzGNGnfQZUo8,1255
 mapillary_tools/types.py,sha256=6kww2UdKM6YzabYbc862BYzEWtxL2hhxCRFfeDiUtF0,22074
-mapillary_tools/upload.py,sha256=
-mapillary_tools/upload_api_v4.py,sha256=
-mapillary_tools/uploader.py,sha256=
+mapillary_tools/upload.py,sha256=C8sWMCYOVzjXpvMsEcx6dpZ47ezKaKV61yFwZs286oo,23639
+mapillary_tools/upload_api_v4.py,sha256=qN6yRpi-qXycRLgIOJqixjXqkYvgt-LJX7NXbQKXZaA,7502
+mapillary_tools/uploader.py,sha256=DBHso4QIP5nsZFDynLjkUvytzwpEOPnOlfeDyA6rTBk,14007
 mapillary_tools/utils.py,sha256=VNtK1tAb3Hh8y3P5e5Y3iewREkIoLDa3C2myRYcF2lY,5970
 mapillary_tools/camm/camm_builder.py,sha256=TXZfhu3xGjtrLEWnB14D7aSOrHOoSJef24YSLApiIfY,10631
 mapillary_tools/camm/camm_parser.py,sha256=RaCWeLvS_AyHD6B6wDUu9DAsdfByVHMAPTqEqjtFibE,9734
 mapillary_tools/commands/__init__.py,sha256=41CFrPLGlG3566uhxssEF3TGAtSpADFPPcDMHbViU0E,171
-mapillary_tools/commands/__main__.py,sha256=
+mapillary_tools/commands/__main__.py,sha256=iDehfT38k4D283BIqy8TDD4K_q42jNhkrUNzWEXuZP4,4974
 mapillary_tools/commands/authenticate.py,sha256=4aVvAQal_mqtm2NEMBt5aKLahi0iRdO8b7WSBf6jokA,1136
 mapillary_tools/commands/process.py,sha256=VxcvQpYHPw7QfT9dNwBLV1jWQ-1w4GtVNVPpmu4Sx9s,10578
-mapillary_tools/commands/process_and_upload.py,sha256=
+mapillary_tools/commands/process_and_upload.py,sha256=cbYr6g4sDxZ3A9jLr-GZdHZObJ8ZKS43jAdZgvqAkWk,655
 mapillary_tools/commands/sample_video.py,sha256=bTJmlDsajkC-QJ_ZO_scdD4R664zs-r_dh-x2PlOgyY,3281
 mapillary_tools/commands/upload.py,sha256=JIWgxupV3ppLvPi1iE7UVaE1302JGcIOvnuNt1Y7YEw,1671
 mapillary_tools/commands/video_process.py,sha256=-wQeeIwWXPmy81HQHam5A0huMLRHknkEFa_V1OwElU4,890
@@ -40,7 +40,7 @@ mapillary_tools/geotag/geotag_images_from_exif.py,sha256=hCgBwZABk2tbBQC3cHQBV5p
 mapillary_tools/geotag/geotag_images_from_exiftool.py,sha256=a-c4H8VIyPdJkfUIvJho0phR0QU0zN8-lSyiCz0wc4s,3981
 mapillary_tools/geotag/geotag_images_from_exiftool_both_image_and_video.py,sha256=nRVAjgTJwx_eCaSBpPCgcIaZs3EYgGueYxSS9XhKv40,3350
 mapillary_tools/geotag/geotag_images_from_gpx.py,sha256=S9Pw6FvP5kRSpHUnKUYKXmw0CHa9V92UmrS_MJfbjS4,9053
-mapillary_tools/geotag/geotag_images_from_gpx_file.py,sha256
+mapillary_tools/geotag/geotag_images_from_gpx_file.py,sha256=-vTbZ1HufZzJCd8VvukdTjsJRcymtfld2W5t65VSG5E,5300
 mapillary_tools/geotag/geotag_images_from_nmea_file.py,sha256=dDdHnJInQ_WN3ZRf-w44NSBElDLPs7XYBiimvE2iCNo,1651
 mapillary_tools/geotag/geotag_images_from_video.py,sha256=XsaWOFChGItl-j1UbKM4hNjUqN29pVNbMpGT_BvI-o8,3306
 mapillary_tools/geotag/geotag_videos_from_exiftool_video.py,sha256=fkkWou1WFt3ft024399vis9No2cxrwot7Pg5HBw7o7s,5225
@@ -60,16 +60,16 @@ mapillary_tools/video_data_extraction/extract_video_data.py,sha256=_2BBdSYeYKR4B
 mapillary_tools/video_data_extraction/video_data_parser_factory.py,sha256=qaJHvLgwI5lukJncMd8ggxeSxXOiVzBSJO5GlGQYiXY,1134
 mapillary_tools/video_data_extraction/extractors/base_parser.py,sha256=s7Xuwg4I5JZ27oL4ebMSdo093plAXfZ-6uDQ_h97WHY,2134
 mapillary_tools/video_data_extraction/extractors/blackvue_parser.py,sha256=jAcGyF6PML2EdJ4zle8cR12QeTRZc5qxlz8_4gcTZPU,1089
-mapillary_tools/video_data_extraction/extractors/camm_parser.py,sha256=
+mapillary_tools/video_data_extraction/extractors/camm_parser.py,sha256=YMiViocXSVlfn8_qm1jcwSJhnnEaK8v5ADHwo2YXe10,1117
 mapillary_tools/video_data_extraction/extractors/exiftool_runtime_parser.py,sha256=PFNCRk9pGrPIfVwLMcnzmVNMITVjNHhbrOOMwxaSstg,2270
 mapillary_tools/video_data_extraction/extractors/exiftool_xml_parser.py,sha256=Tt0h4TiCKocERWMlRXzlpoaA_WJ_4b20MgMLGYNl4AM,1734
 mapillary_tools/video_data_extraction/extractors/generic_video_parser.py,sha256=34O6Km5kNDoJNJtIUOwtAzzMntuqkSZJfeli7caWSkA,1693
 mapillary_tools/video_data_extraction/extractors/gopro_parser.py,sha256=IVnTyquSraTUaG9rxbJfVWc1-drdY5PaHn5urh3IBk4,1325
-mapillary_tools/video_data_extraction/extractors/gpx_parser.py,sha256=
+mapillary_tools/video_data_extraction/extractors/gpx_parser.py,sha256=FNrdnXl48k8I1I5fGwYsClhfFEHVsooRLRboUYECv3I,3811
 mapillary_tools/video_data_extraction/extractors/nmea_parser.py,sha256=raSXavBvP-0LJCB_TwLL0mOv2uHSsB744igTsaKAaGc,658
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
-mapillary_tools-0.13.
+mapillary_tools-0.13.3.dist-info/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
+mapillary_tools-0.13.3.dist-info/METADATA,sha256=qJo2HTakj1kI0l5wl_CMb8pcRP68REFJkO1oJVPdtu4,19758
+mapillary_tools-0.13.3.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
+mapillary_tools-0.13.3.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
+mapillary_tools-0.13.3.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
+mapillary_tools-0.13.3.dist-info/RECORD,,

{mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/LICENSE
File without changes

{mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/entry_points.txt
File without changes

{mapillary_tools-0.13.1a1.dist-info → mapillary_tools-0.13.3.dist-info}/top_level.txt
File without changes