mapillary-tools 0.14.2__py3-none-any.whl → 0.14.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mapillary_tools/__init__.py CHANGED
@@ -1 +1 @@
- VERSION = "0.14.2"
+ VERSION = "0.14.3"
mapillary_tools/history.py CHANGED
@@ -162,6 +162,11 @@ class PersistentCache:
  
          return expired_keys
  
+     def keys(self):
+         with self._lock:
+             with dbm.open(self._file, flag="c") as db:
+                 return db.keys()
+
      def _is_expired(self, payload: JSONDict) -> bool:
          expires_at = payload.get("expires_at")
          if isinstance(expires_at, (int, float)):
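
The added keys() follows the same lock-then-dbm.open pattern as the rest of PersistentCache, so cached entries can be enumerated safely alongside concurrent reads and writes. A minimal sketch of how it might be used (the path is made up; get and clear_expired appear elsewhere in this diff):

    from mapillary_tools.history import PersistentCache

    cache = PersistentCache("/tmp/example/cached_file_handles")  # hypothetical path
    cache.clear_expired()  # drops entries whose "expires_at" payload has passed
    for key in cache.keys():
        # dbm stores keys as bytes
        print(key.decode("utf-8"), "->", cache.get(key.decode("utf-8")))
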
mapillary_tools/uploader.py CHANGED
@@ -2,6 +2,9 @@ from __future__ import annotations
  
  import concurrent.futures
  import dataclasses
+ import datetime
+ import email.utils
+ import hashlib
  import io
  import json
  import logging
@@ -56,6 +59,9 @@ class UploadOptions:
      user_items: config.UserItem
      chunk_size: int = int(constants.UPLOAD_CHUNK_SIZE_MB * 1024 * 1024)
      num_upload_workers: int = constants.MAX_IMAGE_UPLOAD_WORKERS
+     # When set, the upload cache will be read from and written to this path.
+     # This option is exposed for testing purposes; in PROD the path is derived from an envvar and user_items
+     upload_cache_path: Path | None = None
      dry_run: bool = False
      nofinish: bool = False
      noresume: bool = False
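
As the new comment notes, upload_cache_path lets tests pin the cache file directly instead of deriving it from the UPLOAD_CACHE_DIR envvar and user_items. A test-style sketch (token and path are invented; _maybe_create_persistent_cache_instance is added near the end of this file):

    from pathlib import Path
    from mapillary_tools import uploader

    options = uploader.UploadOptions(
        user_items={"user_upload_token": "test-token"},  # hypothetical credentials
        upload_cache_path=Path("/tmp/mly_test/cached_file_handles"),
    )
    # With an explicit path and the default dry_run=False, a cache is returned
    cache = uploader._maybe_create_persistent_cache_instance(options)
    assert cache is not None
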
@@ -471,7 +477,7 @@ class ZipUploader:
              # Arcname should be unique, the name does not matter
              arcname = f"{idx}.jpg"
              zipinfo = zipfile.ZipInfo(arcname, date_time=(1980, 1, 1, 0, 0, 0))
-             zipf.writestr(zipinfo, SingleImageUploader.dump_image_bytes(metadata))
+             zipf.writestr(zipinfo, CachedImageUploader.dump_image_bytes(metadata))
          assert len(sequence) == len(set(zipf.namelist()))
          zipf.comment = json.dumps(
              {"sequence_md5sum": sequence_md5sum},
@@ -537,6 +543,13 @@ class ImageSequenceUploader:
      def __init__(self, upload_options: UploadOptions, emitter: EventEmitter):
          self.upload_options = upload_options
          self.emitter = emitter
+         # Create a single shared CachedImageUploader instance that is reused across all uploads
+         cache = _maybe_create_persistent_cache_instance(self.upload_options)
+         if cache:
+             cache.clear_expired()
+         self.cached_image_uploader = CachedImageUploader(
+             self.upload_options, cache=cache
+         )
  
      def upload_images(
          self, image_metadatas: T.Sequence[types.ImageMetadata]
@@ -688,10 +701,6 @@ class ImageSequenceUploader:
          with api_v4.create_user_session(
              self.upload_options.user_items["user_upload_token"]
          ) as user_session:
-             single_image_uploader = SingleImageUploader(
-                 self.upload_options, user_session=user_session
-             )
-
              while True:
                  # Assert that all images are already pushed into the queue
                  try:
@@ -710,8 +719,8 @@ class ImageSequenceUploader:
                  }
  
                  # image_progress will be updated during uploading
-                 file_handle = single_image_uploader.upload(
-                     image_metadata, image_progress
+                 file_handle = self.cached_image_uploader.upload(
+                     user_session, image_metadata, image_progress
                  )
  
                  # Update chunk_size (it was constant if set)
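
The worker loop now hands its user_session to each upload() call, which is what lets a single shared CachedImageUploader (and its persistent cache) serve all workers. A sketch of the resulting pattern, assuming the uploader, token, and metadata list are already in scope:

    import concurrent.futures

    def upload_one(image_metadata):
        # One HTTP session per worker; the uploader and its cache are
        # shared and marked thread-safe in this diff
        with api_v4.create_user_session(user_upload_token) as user_session:
            return cached_image_uploader.upload(user_session, image_metadata, {})

    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as pool:
        file_handles = list(pool.map(upload_one, image_metadatas))
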
@@ -731,24 +740,27 @@ class ImageSequenceUploader:
          return indexed_file_handles
  
  
- class SingleImageUploader:
+ class CachedImageUploader:
      def __init__(
          self,
          upload_options: UploadOptions,
-         user_session: requests.Session | None = None,
+         cache: history.PersistentCache | None = None,
      ):
          self.upload_options = upload_options
-         self.user_session = user_session
-         self.cache = self._maybe_create_persistent_cache_instance(
-             self.upload_options.user_items, upload_options
-         )
+         self.cache = cache
+         if self.cache:
+             self.cache.clear_expired()
  
+     # Thread-safe
      def upload(
-         self, image_metadata: types.ImageMetadata, image_progress: dict[str, T.Any]
+         self,
+         user_session: requests.Session,
+         image_metadata: types.ImageMetadata,
+         image_progress: dict[str, T.Any],
      ) -> str:
          image_bytes = self.dump_image_bytes(image_metadata)
  
-         uploader = Uploader(self.upload_options, user_session=self.user_session)
+         uploader = Uploader(self.upload_options, user_session=user_session)
  
          session_key = uploader._gen_session_key(io.BytesIO(image_bytes), image_progress)
  
@@ -786,51 +798,7 @@ class SingleImageUploader:
              f"Failed to dump EXIF bytes: {ex}", metadata.filename
          ) from ex
  
-     @classmethod
-     def _maybe_create_persistent_cache_instance(
-         cls, user_items: config.UserItem, upload_options: UploadOptions
-     ) -> history.PersistentCache | None:
-         if not constants.UPLOAD_CACHE_DIR:
-             LOG.debug(
-                 "Upload cache directory is set empty, skipping caching upload file handles"
-             )
-             return None
-
-         if upload_options.dry_run:
-             LOG.debug("Dry-run mode enabled, skipping caching upload file handles")
-             return None
-
-         # Different python/CLI versions use different cache (dbm) formats.
-         # Separate them to avoid conflicts
-         py_version_parts = [str(part) for part in sys.version_info[:3]]
-         version = f"py_{'_'.join(py_version_parts)}_{VERSION}"
-
-         cache_path_dir = (
-             Path(constants.UPLOAD_CACHE_DIR)
-             .joinpath(version)
-             .joinpath(api_v4.MAPILLARY_CLIENT_TOKEN.replace("|", "_"))
-             .joinpath(
-                 user_items.get("MAPSettingsUserKey", user_items["user_upload_token"])
-             )
-         )
-         cache_path_dir.mkdir(parents=True, exist_ok=True)
-         cache_path = cache_path_dir.joinpath("cached_file_handles")
-
-         # Sanitize sensitive segments for logging
-         sanitized_cache_path = (
-             Path(constants.UPLOAD_CACHE_DIR)
-             .joinpath(version)
-             .joinpath("***")
-             .joinpath("***")
-             .joinpath("cached_file_handles")
-         )
-         LOG.debug(f"File handle cache path: {sanitized_cache_path}")
-
-         cache = history.PersistentCache(str(cache_path.resolve()))
-         cache.clear_expired()
-
-         return cache
-
+     # Thread-safe
      def _get_cached_file_handle(self, key: str) -> str | None:
          if self.cache is None:
              return None
@@ -840,6 +808,7 @@ class SingleImageUploader:
  
          return self.cache.get(key)
  
+     # Thread-safe
      def _set_file_handle_cache(self, key: str, value: str) -> None:
          if self.cache is None:
              return
@@ -979,27 +948,33 @@ class Uploader:
              begin_offset = progress.get("begin_offset")
              offset = progress.get("offset")
  
-             if retries <= constants.MAX_UPLOAD_RETRIES and _is_retriable_exception(ex):
-                 self.emitter.emit("upload_retrying", progress)
+             LOG.warning(
+                 f"Error uploading {self._upload_name(progress)} at {offset=} since {begin_offset=}: {ex.__class__.__name__}: {ex}"
+             )
  
-                 LOG.warning(
-                     f"Error uploading {self._upload_name(progress)} at {offset=} since {begin_offset=}: {ex.__class__.__name__}: {ex}"
-                 )
+             if retries <= constants.MAX_UPLOAD_RETRIES:
+                 retriable, retry_after_sec = _is_retriable_exception(ex)
+                 if retriable:
+                     self.emitter.emit("upload_retrying", progress)
  
-                 # Keep things immutable here. Will increment retries in the caller
-                 retries += 1
-                 if _is_immediate_retriable_exception(ex):
-                     sleep_for = 0
-                 else:
-                     sleep_for = min(2**retries, 16)
-                 LOG.info(
-                     f"Retrying in {sleep_for} seconds ({retries}/{constants.MAX_UPLOAD_RETRIES})"
-                 )
-                 if sleep_for:
-                     time.sleep(sleep_for)
-             else:
-                 self.emitter.emit("upload_failed", progress)
-                 raise ex
+                     # Keep things immutable here. Will increment retries in the caller
+                     retries += 1
+                     if _is_immediate_retriable_exception(ex):
+                         sleep_for = 0
+                     else:
+                         sleep_for = min(2**retries, 16)
+                     sleep_for += retry_after_sec
+
+                     LOG.info(
+                         f"Retrying in {sleep_for} seconds ({retries}/{constants.MAX_UPLOAD_RETRIES})"
+                     )
+                     if sleep_for:
+                         time.sleep(sleep_for)
+
+                     return
+
+             self.emitter.emit("upload_failed", progress)
+             raise ex
  
      @classmethod
      def _upload_name(cls, progress: UploaderProgress):
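
Net effect of the restructured handler: the exponential delay min(2**retries, 16) (or 0 for immediately retriable errors) is now topped up by the server-supplied retry_after_sec, and exhausting the retry budget falls through to upload_failed. A standalone illustration of the resulting delays (the constant 7 and the 10-second retry_after_sec are assumed values for the example):

    MAX_UPLOAD_RETRIES = 7  # illustrative; the real value lives in constants
    retry_after_sec = 10    # e.g. parsed from a 429 response with "backoff": 10000 (ms)

    for retries in range(1, MAX_UPLOAD_RETRIES + 1):
        sleep_for = min(2**retries, 16) + retry_after_sec
        print(f"retry {retries}: sleeping {sleep_for}s")
    # 12s, 14s, 18s, 26s, then capped at 16 + 10 = 26s for the remaining retries
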
@@ -1116,23 +1091,188 @@ def _is_immediate_retriable_exception(ex: BaseException) -> bool:
      return False
  
  
- def _is_retriable_exception(ex: BaseException) -> bool:
+ def _is_retriable_exception(ex: BaseException) -> tuple[bool, int]:
+     """
+     Determine if an exception should be retried and how long to wait.
+
+     Args:
+         ex: Exception to check for retryability
+
+     Returns:
+         Tuple of (retriable, retry_after_sec) where:
+         - retriable: True if the exception should be retried
+         - retry_after_sec: Seconds to wait before retry (>= 0)
+
+     Examples:
+         >>> resp = requests.Response()
+         >>> resp._content = b"foo"
+         >>> resp.status_code = 400
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (False, 0)
+         >>> resp._content = b'{"backoff": 13000, "debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}'
+         >>> resp.status_code = 400
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 13)
+         >>> resp._content = b'{"backoff": "foo", "debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}'
+         >>> resp.status_code = 400
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 10)
+         >>> resp._content = b'{"debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}'
+         >>> resp.status_code = 400
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 10)
+         >>> resp._content = b"foo"
+         >>> resp.status_code = 429
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 10)
+         >>> resp._content = b'{"backoff": 12000, "debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}'
+         >>> resp.status_code = 429
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 12)
+         >>> resp._content = b'{"backoff": 12000, "debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}'
+         >>> resp.headers = {"Retry-After": "1"}
+         >>> resp.status_code = 503
+         >>> ex = requests.HTTPError("error", response=resp)
+         >>> _is_retriable_exception(ex)
+         (True, 1)
+     """
+
+     DEFAULT_RETRY_AFTER_RATE_LIMIT_SEC = 10
+
      if isinstance(ex, (requests.ConnectionError, requests.Timeout)):
-         return True
+         return True, 0
  
      if isinstance(ex, requests.HTTPError) and isinstance(
          ex.response, requests.Response
      ):
-         if 400 <= ex.response.status_code < 500:
+         status_code = ex.response.status_code
+
+         # Always retry 429, with some delay
+         if status_code == 429:
+             retry_after_sec = (
+                 _parse_retry_after_from_header(ex.response)
+                 or DEFAULT_RETRY_AFTER_RATE_LIMIT_SEC
+             )
+
              try:
-                 resp = ex.response.json()
-             except json.JSONDecodeError:
-                 return False
-             return resp.get("debug_info", {}).get("retriable", False)
-         else:
-             return True
+                 data = ex.response.json()
+             except requests.JSONDecodeError:
+                 return True, retry_after_sec
  
-     return False
+             backoff_ms = _parse_backoff(data.get("backoff"))
+             if backoff_ms is None:
+                 return True, retry_after_sec
+             else:
+                 return True, max(0, int(int(backoff_ms) / 1000))
+
+         if 400 <= status_code < 500:
+             try:
+                 data = ex.response.json()
+             except requests.JSONDecodeError:
+                 return False, (_parse_retry_after_from_header(ex.response) or 0)
+
+             debug_info = data.get("debug_info", {})
+
+             if isinstance(debug_info, dict):
+                 error_type = debug_info.get("type")
+             else:
+                 error_type = None
+
+             # The server may respond 429 RequestRateLimitedError but with retriable=false.
+             # We should retry in this case regardless,
+             # e.g. HTTP 429 {"backoff": 10000, "debug_info": {"retriable": false, "type": "RequestRateLimitedError", "message": "Request rate limit has been exceeded"}}
+             if error_type == "RequestRateLimitedError":
+                 backoff_ms = _parse_backoff(data.get("backoff"))
+                 if backoff_ms is None:
+                     return True, (
+                         _parse_retry_after_from_header(ex.response)
+                         or DEFAULT_RETRY_AFTER_RATE_LIMIT_SEC
+                     )
+                 else:
+                     return True, max(0, int(int(backoff_ms) / 1000))
+
+             return debug_info.get("retriable", False), 0
+
+         if 500 <= status_code < 600:
+             return True, (_parse_retry_after_from_header(ex.response) or 0)
+
+     return False, 0
+
+
+ def _parse_backoff(backoff: T.Any) -> int | None:
+     if backoff is not None:
+         try:
+             backoff_ms = int(backoff)
+         except (ValueError, TypeError):
+             backoff_ms = None
+     else:
+         backoff_ms = None
+     return backoff_ms
+
+
+ def _parse_retry_after_from_header(resp: requests.Response) -> int | None:
+     """
+     Parse the Retry-After header from an HTTP response.
+     See https://developer.mozilla.org/en-US/docs/Web/HTTP/Reference/Headers/Retry-After
+
+     Args:
+         resp: HTTP response object with headers
+
+     Returns:
+         Number of seconds to wait (>= 0), or None if the header is missing or invalid.
+
+     Examples:
+         >>> resp = requests.Response()
+         >>> resp.headers = {"Retry-After": "1"}
+         >>> _parse_retry_after_from_header(resp)
+         1
+         >>> resp.headers = {"Retry-After": "-1"}
+         >>> _parse_retry_after_from_header(resp)
+         0
+         >>> resp.headers = {"Retry-After": "Wed, 21 Oct 2015 07:28:00 GMT"}
+         >>> _parse_retry_after_from_header(resp)
+         0
+         >>> resp.headers = {"Retry-After": "Wed, 21 Oct 2315 07:28:00"}
+         >>> _parse_retry_after_from_header(resp)
+     """
+
+     value = resp.headers.get("Retry-After")
+     if value is None:
+         return None
+
+     try:
+         return max(0, int(value))
+     except (ValueError, TypeError):
+         pass
+
+     # e.g. "Wed, 21 Oct 2015 07:28:00 GMT"
+     try:
+         dt = email.utils.parsedate_to_datetime(value)
+     except (ValueError, TypeError):
+         dt = None
+
+     if dt is None:
+         LOG.warning(f"Error parsing Retry-After: {value}")
+         return None
+
+     try:
+         delta = dt - datetime.datetime.now(datetime.timezone.utc)
+     except (TypeError, ValueError):
+         # e.g. TypeError: can't subtract offset-naive and offset-aware datetimes
+         return None
+
+     return max(0, int(delta.total_seconds()))
  
  
  _SUFFIX_MAP: dict[api_v4.ClusterFileType | types.FileType, str] = {
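
For the HTTP-date form of Retry-After, the helper leans on email.utils.parsedate_to_datetime: a past date clamps to 0, and a date without a timezone yields a naive datetime whose subtraction from an aware "now" raises TypeError, which the helper maps to None. A standalone check of that behavior (standard library only):

    import datetime
    import email.utils

    # An HTTP-date in the past clamps to 0 seconds
    dt = email.utils.parsedate_to_datetime("Wed, 21 Oct 2015 07:28:00 GMT")
    delta = dt - datetime.datetime.now(datetime.timezone.utc)
    print(max(0, int(delta.total_seconds())))  # 0

    # No timezone -> naive datetime; subtracting an aware datetime raises TypeError
    naive = email.utils.parsedate_to_datetime("Wed, 21 Oct 2315 07:28:00")
    print(naive.tzinfo)  # None
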
@@ -1168,3 +1308,57 @@ def _prefixed_uuid4():
  
  def _is_uuid(key: str) -> bool:
      return key.startswith("uuid_") or key.startswith("mly_tools_uuid_")
+
+
+ def _build_upload_cache_path(upload_options: UploadOptions) -> Path:
+     # Different python/CLI versions use different cache (dbm) formats.
+     # Separate them to avoid conflicts
+     py_version_parts = [str(part) for part in sys.version_info[:3]]
+     version = f"py_{'_'.join(py_version_parts)}_{VERSION}"
+     # File handles are not sharable between different users
+     user_id = str(
+         upload_options.user_items.get(
+             "MAPSettingsUserKey", upload_options.user_items["user_upload_token"]
+         )
+     )
+     # Hash these to avoid logging sensitive data
+     user_fingerprint = utils.md5sum_fp(
+         io.BytesIO((api_v4.MAPILLARY_CLIENT_TOKEN + user_id).encode("utf-8")),
+         md5=hashlib.sha256(),
+     ).hexdigest()[:24]
+
+     cache_path = (
+         Path(constants.UPLOAD_CACHE_DIR)
+         .joinpath(version)
+         .joinpath(user_fingerprint)
+         .joinpath("cached_file_handles")
+     )
+
+     return cache_path
+
+
+ def _maybe_create_persistent_cache_instance(
+     upload_options: UploadOptions,
+ ) -> history.PersistentCache | None:
+     """Create a persistent cache instance if caching is enabled."""
+
+     if upload_options.dry_run:
+         LOG.debug("Dry-run mode enabled, skipping caching upload file handles")
+         return None
+
+     if upload_options.upload_cache_path is None:
+         if not constants.UPLOAD_CACHE_DIR:
+             LOG.debug(
+                 "Upload cache directory is set empty, skipping caching upload file handles"
+             )
+             return None
+
+         cache_path = _build_upload_cache_path(upload_options)
+     else:
+         cache_path = upload_options.upload_cache_path
+
+     LOG.debug(f"File handle cache path: {cache_path}")
+
+     cache_path.parent.mkdir(parents=True, exist_ok=True)
+
+     return history.PersistentCache(str(cache_path.resolve()))
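
_build_upload_cache_path therefore yields <UPLOAD_CACHE_DIR>/py_<major_minor_patch>_<VERSION>/<fingerprint>/cached_file_handles, replacing the two ***-masked path segments logged in 0.14.2 with a single 24-hex-character SHA-256 fingerprint that is safe to log. The fingerprint boils down to the following (a sketch; the token and user id are invented, and utils.md5sum_fp merely streams the bytes through the supplied hash object):

    import hashlib

    client_token = "MLY|1234|abcd"  # stand-in for api_v4.MAPILLARY_CLIENT_TOKEN
    user_id = "some-user-key"       # MAPSettingsUserKey, or the upload token as fallback

    fingerprint = hashlib.sha256(
        (client_token + user_id).encode("utf-8")
    ).hexdigest()[:24]
    # Deterministic per (client token, user), and reveals no secrets in debug logs
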
mapillary_tools/utils.py CHANGED
@@ -247,8 +247,8 @@ def configure_logger(
      try:
          # Disable globally for now. TODO Disable it in non-interactive mode only
          raise ImportError
-         from rich.console import Console  # type: ignore
-         from rich.logging import RichHandler  # type: ignore
+         from rich.console import Console  # type: ignore[import]
+         from rich.logging import RichHandler  # type: ignore[import]
      except ImportError:
          formatter = logging.Formatter(
              "%(asctime)s.%(msecs)03d - %(levelname)-7s - %(message)s",
mapillary_tools-0.14.2.dist-info/METADATA → mapillary_tools-0.14.3.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: mapillary_tools
- Version: 0.14.2
+ Version: 0.14.3
  Summary: Mapillary Image/Video Import Pipeline
  Author-email: Mapillary <support@mapillary.com>
  License: BSD
mapillary_tools-0.14.2.dist-info/RECORD → mapillary_tools-0.14.3.dist-info/RECORD
@@ -1,4 +1,4 @@
- mapillary_tools/__init__.py,sha256=qdi1NvyXrEYAkFGfNSU2jMM-Ua-ehuWizcwvW341obw,19
+ mapillary_tools/__init__.py,sha256=-BdvXvwpHU687pG7vjnbfxs01J_K3Vy-_CnU53ctcY0,19
  mapillary_tools/api_v4.py,sha256=bckAU_atUs0pSuqySeY4W0Rs011a21ClJHo_mbbcXXw,4864
  mapillary_tools/authenticate.py,sha256=mmaOwjQ444DcX4lRw2ms3naBg5Y_xwIJAIWeVdsQfqM,11742
  mapillary_tools/blackvue_parser.py,sha256=ea2JtU9MWU6yB0bQlF970_Of0bJVofSTRq1P30WKW-0,5623
@@ -12,7 +12,7 @@ mapillary_tools/exiftool_read_video.py,sha256=23O_bjUOVq6j7i3xMz6fY-XIEsjinsCejK
  mapillary_tools/exiftool_runner.py,sha256=g4gSyqeh3D6EnMJ-c3s-RnO2EP_jD354Qkaz0Y-4D04,1658
  mapillary_tools/ffmpeg.py,sha256=akpvvsjAR-Iiv-hOrUoJvPM9vUU3JqMQ5HJL1_NgwB8,22908
  mapillary_tools/geo.py,sha256=mWaESfDf_zHmyvnt5aVFro4FGrjiULNsuZ6HfGUWvSA,11009
- mapillary_tools/history.py,sha256=LP6e0zEYVBwRGUbFaGoE_AaBIEdpB4XrZsg9qwJVvRI,5344
+ mapillary_tools/history.py,sha256=zyXYXB8pO9Buffn-8-Ien4s74hGD7fyPr2QpBeZwEWw,5478
  mapillary_tools/http.py,sha256=-df_oGyImO2AOmPnXcKMcztlL4LOZLArE6ki81NMGUA,6411
  mapillary_tools/ipc.py,sha256=DwWQb9hNshx0bg0Fo5NjY0mXjs-FkbR6tIQmjMgMtmg,1089
  mapillary_tools/process_geotag_properties.py,sha256=3EaVvjfKB-O38OjopBcxeEdP6qI5IPIxqmO6isjcXKM,14205
@@ -22,8 +22,8 @@ mapillary_tools/telemetry.py,sha256=lL6qQbtOZft4DZZrCNK3njlwHT_30zLyYS_YRN5pgHY,
  mapillary_tools/types.py,sha256=pIU2wcxiOUWT5Pd05pgNzY9EVEDlwoldtlF2IIYYvE0,5909
  mapillary_tools/upload.py,sha256=XejAgmVW4Y33MiQ2g-shvHZA_zXTekEsOUHUHNx2AE4,24047
  mapillary_tools/upload_api_v4.py,sha256=VgOf7RhfUuzmlSBUp5CpekKIJ0xQrC0r-r0Ds9-wU4I,7344
- mapillary_tools/uploader.py,sha256=Rw-1AkxE4TnddJNU6EW--9wmKYRqHbcTeheujdaluiM,39813
- mapillary_tools/utils.py,sha256=cP9idKt4EJqfC0qqOGneSoPNpPiYhaW8VjQ9CLYjESc,8092
+ mapillary_tools/uploader.py,sha256=4bd2YGIAJOK5Jx3ZLIzkLAAfBtU2F708_lTtatJvVas,46642
+ mapillary_tools/utils.py,sha256=HjTZ01GQv_UNGySaTZ_Mc1Gn_Y0x3knQf7Vh17whDFw,8108
  mapillary_tools/camm/camm_builder.py,sha256=ub6Z9ijep8zAo1NOlU51Gxk95kQ2vfN58YgVCLmNMRk,9211
  mapillary_tools/camm/camm_parser.py,sha256=aNHP65hNXYQBWBTfhaj_S5XYzmAHhjwcAfGhbm83__o,18043
  mapillary_tools/commands/__init__.py,sha256=41CFrPLGlG3566uhxssEF3TGAtSpADFPPcDMHbViU0E,171
@@ -68,9 +68,9 @@ mapillary_tools/mp4/simple_mp4_builder.py,sha256=9TUGk1hzI6mQFN1P30jwHL3dCYz3Zz7
  mapillary_tools/mp4/simple_mp4_parser.py,sha256=g3vvPhBoNu7anhVzC5_XQCV7IwfRWro1vJ6d6GyDkHE,6315
  mapillary_tools/serializer/description.py,sha256=ECnQxC-1LOgkAKE5qFi9Y2KuCeH8KPUjjNFDiwebjvo,18647
  mapillary_tools/serializer/gpx.py,sha256=_xx6gHjaWHrlXaUpB5GGBrbRKzbExFyIzWWAH-CvksI,4383
- mapillary_tools-0.14.2.dist-info/licenses/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
- mapillary_tools-0.14.2.dist-info/METADATA,sha256=rEmF5Twbh9m-zYNF1klQjGLOIrmiv5mCAQnbOb1Z8GE,22200
- mapillary_tools-0.14.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- mapillary_tools-0.14.2.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
- mapillary_tools-0.14.2.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
- mapillary_tools-0.14.2.dist-info/RECORD,,
+ mapillary_tools-0.14.3.dist-info/licenses/LICENSE,sha256=l2D8cKfFmmJq_wcVq_JElPJrlvWQOzNWx7gMLINucxc,1292
+ mapillary_tools-0.14.3.dist-info/METADATA,sha256=f-tqdyREvL0ZXxfm_Mao2KdWkLsWhHzglP6S6SYMjTU,22200
+ mapillary_tools-0.14.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ mapillary_tools-0.14.3.dist-info/entry_points.txt,sha256=A3f3LP-BO_P-U8Y29QfpT4jx6Mjk3sXjTi2Yew4bvj8,75
+ mapillary_tools-0.14.3.dist-info/top_level.txt,sha256=FbDkMgOrt1S70ho1WSBrOwzKOSkJFDwwqFOoY5-527s,16
+ mapillary_tools-0.14.3.dist-info/RECORD,,