mapillary-tools 0.14.0a2__py3-none-any.whl → 0.14.0b1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package. It is provided for informational purposes only and reflects the package versions as they appear in their public registries.
Files changed (38)
  1. mapillary_tools/__init__.py +1 -1
  2. mapillary_tools/api_v4.py +1 -0
  3. mapillary_tools/authenticate.py +9 -9
  4. mapillary_tools/blackvue_parser.py +79 -22
  5. mapillary_tools/config.py +38 -17
  6. mapillary_tools/constants.py +2 -0
  7. mapillary_tools/exiftool_read_video.py +52 -15
  8. mapillary_tools/exiftool_runner.py +4 -24
  9. mapillary_tools/ffmpeg.py +406 -232
  10. mapillary_tools/geotag/__init__.py +0 -0
  11. mapillary_tools/geotag/base.py +2 -2
  12. mapillary_tools/geotag/factory.py +97 -88
  13. mapillary_tools/geotag/geotag_images_from_exiftool.py +26 -19
  14. mapillary_tools/geotag/geotag_images_from_gpx.py +13 -6
  15. mapillary_tools/geotag/geotag_images_from_video.py +35 -0
  16. mapillary_tools/geotag/geotag_videos_from_exiftool.py +39 -13
  17. mapillary_tools/geotag/geotag_videos_from_gpx.py +22 -9
  18. mapillary_tools/geotag/options.py +25 -3
  19. mapillary_tools/geotag/video_extractors/base.py +1 -1
  20. mapillary_tools/geotag/video_extractors/exiftool.py +1 -1
  21. mapillary_tools/geotag/video_extractors/gpx.py +60 -70
  22. mapillary_tools/geotag/video_extractors/native.py +9 -31
  23. mapillary_tools/history.py +4 -1
  24. mapillary_tools/process_geotag_properties.py +16 -8
  25. mapillary_tools/process_sequence_properties.py +9 -11
  26. mapillary_tools/sample_video.py +7 -6
  27. mapillary_tools/serializer/description.py +587 -0
  28. mapillary_tools/serializer/gpx.py +132 -0
  29. mapillary_tools/types.py +44 -610
  30. mapillary_tools/upload.py +176 -197
  31. mapillary_tools/upload_api_v4.py +94 -51
  32. mapillary_tools/uploader.py +284 -138
  33. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/METADATA +87 -31
  34. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/RECORD +38 -35
  35. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/WHEEL +1 -1
  36. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/entry_points.txt +0 -0
  37. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/licenses/LICENSE +0 -0
  38. {mapillary_tools-0.14.0a2.dist-info → mapillary_tools-0.14.0b1.dist-info}/top_level.txt +0 -0
mapillary_tools/upload.py CHANGED
@@ -15,6 +15,7 @@ from tqdm import tqdm
 
 from . import (
     api_v4,
+    config,
    constants,
    exceptions,
    geo,
@@ -22,7 +23,6 @@ from . import (
     ipc,
     telemetry,
     types,
-    upload_api_v4,
    uploader,
    utils,
    VERSION,
@@ -30,6 +30,7 @@ from . import (
 from .camm import camm_builder, camm_parser
 from .gpmf import gpmf_parser
 from .mp4 import simple_mp4_builder
+from .serializer.description import DescriptionJSONSerializer
 from .types import FileType
 
 JSONDict = T.Dict[str, T.Union[str, int, float, None]]
@@ -41,87 +42,94 @@ class UploadedAlreadyError(uploader.SequenceError):
     pass
 
 
-def _load_validate_metadatas_from_desc_path(
-    desc_path: str | None, import_paths: T.Sequence[Path]
-) -> list[types.Metadata]:
-    is_default_desc_path = False
-    if desc_path is None:
-        is_default_desc_path = True
-        if len(import_paths) == 1 and import_paths[0].is_dir():
-            desc_path = str(
-                import_paths[0].joinpath(constants.IMAGE_DESCRIPTION_FILENAME)
-            )
-        else:
-            if 1 < len(import_paths):
-                raise exceptions.MapillaryBadParameterError(
-                    "The description path must be specified (with --desc_path) when uploading multiple paths",
-                )
-            else:
-                raise exceptions.MapillaryBadParameterError(
-                    "The description path must be specified (with --desc_path) when uploading a single file",
-                )
+def upload(
+    import_path: Path | T.Sequence[Path],
+    user_items: config.UserItem,
+    desc_path: str | None = None,
+    _metadatas_from_process: T.Sequence[types.MetadataOrError] | None = None,
+    dry_run=False,
+    skip_subfolders=False,
+) -> None:
+    import_paths = _normalize_import_paths(import_path)
 
-    descs: list[types.DescriptionOrError] = []
+    metadatas = _load_descs(_metadatas_from_process, import_paths, desc_path)
 
-    if desc_path == "-":
-        try:
-            descs = json.load(sys.stdin)
-        except json.JSONDecodeError as ex:
-            raise exceptions.MapillaryInvalidDescriptionFile(
-                f"Invalid JSON stream from stdin: {ex}"
-            ) from ex
-    else:
-        if not os.path.isfile(desc_path):
-            if is_default_desc_path:
-                raise exceptions.MapillaryFileNotFoundError(
-                    f"Description file {desc_path} not found. Has the directory been processed yet?"
-                )
+    jsonschema.validate(instance=user_items, schema=config.UserItemSchema)
+
+    # Setup the emitter -- the order matters here
+
+    emitter = uploader.EventEmitter()
+
+    # When dry_run mode is on, we disable history by default.
+    # But we need dry_run for tests, so we added MAPILLARY__ENABLE_UPLOAD_HISTORY_FOR_DRY_RUN
+    # and when it is on, we enable history regardless of dry_run
+    enable_history = constants.MAPILLARY_UPLOAD_HISTORY_PATH and (
+        not dry_run or constants.MAPILLARY__ENABLE_UPLOAD_HISTORY_FOR_DRY_RUN
+    )
+
+    # Put it first one to check duplications first
+    if enable_history:
+        upload_run_params: JSONDict = {
+            # Null if multiple paths provided
+            "import_path": str(import_path) if isinstance(import_path, Path) else None,
+            "organization_key": user_items.get("MAPOrganizationKey"),
+            "user_key": user_items.get("MAPSettingsUserKey"),
+            "version": VERSION,
+            "run_at": time.time(),
+        }
+        _setup_history(emitter, upload_run_params, metadatas)
+
+    # Set up tdqm
+    _setup_tdqm(emitter)
+
+    # Now stats is empty but it will collect during ALL uploads
+    stats = _setup_api_stats(emitter)
+
+    # Send the progress via IPC, and log the progress in debug mode
+    _setup_ipc(emitter)
+
+    mly_uploader = uploader.Uploader(user_items, emitter=emitter, dry_run=dry_run)
+
+    results = _gen_upload_everything(
+        mly_uploader, metadatas, import_paths, skip_subfolders
+    )
+
+    upload_successes = 0
+    upload_errors: list[Exception] = []
+
+    # The real upload happens sequentially here
+    try:
+        for _, result in results:
+            if result.error is not None:
+                upload_errors.append(_continue_or_fail(result.error))
             else:
-                raise exceptions.MapillaryFileNotFoundError(
-                    f"Description file {desc_path} not found"
-                )
-        with open(desc_path) as fp:
-            try:
-                descs = json.load(fp)
-            except json.JSONDecodeError as ex:
-                raise exceptions.MapillaryInvalidDescriptionFile(
-                    f"Invalid JSON file {desc_path}: {ex}"
-                ) from ex
+                upload_successes += 1
 
-    # the descs load from stdin or json file may contain invalid entries
-    validated_descs = [
-        types.validate_and_fail_desc(desc)
-        for desc in descs
-        # skip error descriptions
-        if "error" not in desc
-    ]
+    except Exception as ex:
+        # Fatal error: log and raise
+        if not dry_run:
+            _api_logging_failed(_summarize(stats), ex)
+        raise ex
 
-    # throw if we found any invalid descs
-    invalid_descs = [desc for desc in validated_descs if "error" in desc]
-    if invalid_descs:
-        for desc in invalid_descs:
-            LOG.error("Invalid description entry: %s", json.dumps(desc))
-        raise exceptions.MapillaryInvalidDescriptionFile(
-            f"Found {len(invalid_descs)} invalid descriptions"
-        )
+    else:
+        if not dry_run:
+            _api_logging_finished(_summarize(stats))
 
-    # validated_descs should contain no errors
-    return [
-        types.from_desc(T.cast(types.Description, desc)) for desc in validated_descs
-    ]
+    finally:
+        # We collected stats after every upload is finished
+        assert upload_successes == len(stats), (
+            f"Expect {upload_successes} success but got {stats}"
+        )
+        _show_upload_summary(stats, upload_errors)
 
 
-def zip_images(
-    import_path: Path,
-    zip_dir: Path,
-    desc_path: str | None = None,
-):
+def zip_images(import_path: Path, zip_dir: Path, desc_path: str | None = None):
     if not import_path.is_dir():
         raise exceptions.MapillaryFileNotFoundError(
             f"Import directory not found: {import_path}"
         )
 
-    metadatas = _load_validate_metadatas_from_desc_path(desc_path, [import_path])
+    metadatas = _load_valid_metadatas_from_desc_path([import_path], desc_path)
 
     if not metadatas:
         LOG.warning("No images or videos found in %s", desc_path)
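Note: the hunk above moves upload() to the top of the module and rebuilds it around handlers registered on uploader.EventEmitter before any bytes are sent. A minimal sketch of the decorator-registration pattern it relies on (this toy EventEmitter is an illustration, not the actual uploader implementation):

    import typing as T

    class EventEmitter:
        def __init__(self) -> None:
            self._handlers: dict[str, list[T.Callable]] = {}

        def on(self, event: str):
            # Return a decorator so multiple handlers can subscribe per event
            def decorator(fn: T.Callable) -> T.Callable:
                self._handlers.setdefault(event, []).append(fn)
                return fn

            return decorator

        def emit(self, event: str, payload: dict) -> None:
            # Handlers run in registration order, which is why upload() wires
            # history first: duplicates are rejected before anything else runs
            for fn in self._handlers.get(event, []):
                fn(payload)

    emitter = EventEmitter()

    @emitter.on("upload_start")
    def check_duplication(payload: dict) -> None:
        print("starting", payload.get("import_path"))

    emitter.emit("upload_start", {"import_path": "fixtures/images"})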
@@ -141,7 +149,7 @@ def _setup_history(
 ) -> None:
     @emitter.on("upload_start")
     def check_duplication(payload: uploader.Progress):
-        md5sum = payload.get("md5sum")
+        md5sum = payload.get("sequence_md5sum")
        assert md5sum is not None, f"md5sum has to be set for {payload}"
 
        if history.is_uploaded(md5sum):
@@ -164,7 +172,7 @@ def _setup_history(
     @emitter.on("upload_finished")
     def write_history(payload: uploader.Progress):
         sequence_uuid = payload.get("sequence_uuid")
-        md5sum = payload.get("md5sum")
+        md5sum = payload.get("sequence_md5sum")
        assert md5sum is not None, f"md5sum has to be set for {payload}"
 
        if sequence_uuid is None:
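Note: these two hunks rename the progress key md5sum to sequence_md5sum; a consumer still reading the old key would now get None and trip the assertion. A sketch of the payload shape after the rename (key names come from the diff; the values are made up):

    # Illustrative progress payload; all values are hypothetical
    payload = {
        "sequence_uuid": "0f9e6c6c-0000-0000-0000-000000000000",
        "sequence_md5sum": "d41d8cd98f00b204e9800998ecf8427e",
        "import_path": "videos/drive.mp4",
        "file_type": "camm",
    }

    md5sum = payload.get("sequence_md5sum")  # payload.get("md5sum") is now None
    assert md5sum is not None, f"md5sum has to be set for {payload}"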
@@ -192,8 +200,9 @@ def _setup_history(
 def _setup_tdqm(emitter: uploader.EventEmitter) -> None:
     upload_pbar: tqdm | None = None
 
+    @emitter.on("upload_start")
     @emitter.on("upload_fetch_offset")
-    def upload_fetch_offset(payload: uploader.Progress) -> None:
+    def upload_start(payload: uploader.Progress) -> None:
        nonlocal upload_pbar
 
        if upload_pbar is not None:
@@ -204,18 +213,18 @@ def _setup_tdqm(emitter: uploader.EventEmitter) -> None:
         import_path: str | None = payload.get("import_path")
         filetype = payload.get("file_type", "unknown").upper()
         if import_path is None:
-            _desc = f"Uploading {filetype} ({nth}/{total})"
+            desc = f"Uploading {filetype} ({nth}/{total})"
         else:
-            _desc = (
+            desc = (
                 f"Uploading {filetype} {os.path.basename(import_path)} ({nth}/{total})"
             )
         upload_pbar = tqdm(
             total=payload["entity_size"],
-            desc=_desc,
+            desc=desc,
             unit="B",
             unit_scale=True,
             unit_divisor=1024,
-            initial=payload["offset"],
+            initial=payload.get("offset", 0),
             disable=LOG.getEffectiveLevel() <= logging.DEBUG,
         )
 
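Note: stacking the two @emitter.on decorators registers the same function for both events, so the progress bar is now created on upload_start as well as upload_fetch_offset, and initial falls back to 0 because upload_start payloads carry no offset. A standalone sketch of that stacking behavior (the on() helper is a stand-in for the emitter's decorator):

    import typing as T

    registry: dict[str, list[T.Callable]] = {}

    def on(event: str):
        def decorator(fn: T.Callable) -> T.Callable:
            registry.setdefault(event, []).append(fn)
            return fn

        return decorator

    @on("upload_start")
    @on("upload_fetch_offset")
    def upload_start(payload: dict) -> None:
        # upload_start events have no offset; resume events do
        print("bar starts at byte", payload.get("offset", 0))

    # Both events dispatch to the same function object
    assert registry["upload_start"][0] is registry["upload_fetch_offset"][0]
    upload_start({})                # bar starts at byte 0
    upload_start({"offset": 1024})  # bar starts at byte 1024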
@@ -295,8 +304,13 @@ def _setup_api_stats(emitter: uploader.EventEmitter):
 
     @emitter.on("upload_start")
     def collect_start_time(payload: _APIStats) -> None:
-        payload["upload_start_time"] = time.time()
+        now = time.time()
+        payload["upload_start_time"] = now
         payload["upload_total_time"] = 0
+        # These filed should be initialized in upload events like "upload_fetch_offset"
+        # but since we disabled them for uploading images, so we initialize them here
+        payload["upload_last_restart_time"] = now
+        payload["upload_first_offset"] = 0
 
     @emitter.on("upload_fetch_offset")
     def collect_restart_time(payload: _APIStats) -> None:
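Note: upload_last_restart_time and upload_first_offset used to be seeded by the upload_fetch_offset handler, which image uploads no longer trigger, so collect_start_time now initializes them. A hedged sketch of the downstream arithmetic that would otherwise raise KeyError for image sequences (the real _summarize() is not shown in this diff, so this consumer is an assumption):

    import time

    # Hypothetical stats consumer, not the module's actual _summarize()
    def upload_speed(stats: dict) -> float:
        # Both keys are guaranteed by collect_start_time after this change
        elapsed = time.time() - stats["upload_last_restart_time"]
        sent = stats["entity_size"] - stats["upload_first_offset"]
        return sent / elapsed if elapsed > 0 else 0.0

    stats = {
        "upload_last_restart_time": time.time() - 2.0,
        "upload_first_offset": 0,
        "entity_size": 4 * 1024 * 1024,  # hypothetical 4 MiB payload
    }
    print(f"{upload_speed(stats):.0f} B/s")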
@@ -417,34 +431,6 @@ def _api_logging_failed(payload: dict, exc: Exception):
     LOG.warning("Error from API Logging for action %s", action, exc_info=True)
 
 
-def _load_descs(
-    _metadatas_from_process: T.Sequence[types.MetadataOrError] | None,
-    desc_path: str | None,
-    import_paths: T.Sequence[Path],
-) -> list[types.Metadata]:
-    metadatas: list[types.Metadata]
-
-    if _metadatas_from_process is not None:
-        metadatas, _ = types.separate_errors(_metadatas_from_process)
-    else:
-        metadatas = _load_validate_metadatas_from_desc_path(desc_path, import_paths)
-
-    # Make sure all metadatas have sequence uuid assigned
-    # It is used to find the right sequence when writing upload history
-    missing_sequence_uuid = str(uuid.uuid4())
-    for metadata in metadatas:
-        if isinstance(metadata, types.ImageMetadata):
-            if metadata.MAPSequenceUUID is None:
-                metadata.MAPSequenceUUID = missing_sequence_uuid
-
-    for metadata in metadatas:
-        assert isinstance(metadata, (types.ImageMetadata, types.VideoMetadata))
-        if isinstance(metadata, types.ImageMetadata):
-            assert metadata.MAPSequenceUUID is not None
-
-    return metadatas
-
-
 _M = T.TypeVar("_M", bound=types.Metadata)
 
 
@@ -466,9 +452,9 @@ def _gen_upload_everything(
         (m for m in metadatas if isinstance(m, types.ImageMetadata)),
         utils.find_images(import_paths, skip_subfolders=skip_subfolders),
     )
-    for image_result in uploader.ZipImageSequence.prepare_images_and_upload(
-        image_metadatas,
+    for image_result in uploader.ZipImageSequence.upload_images(
         mly_uploader,
+        image_metadatas,
     ):
        yield image_result
 
@@ -509,13 +495,9 @@ def _gen_upload_videos(
             "sequence_idx": idx,
             "file_type": video_metadata.filetype.value,
             "import_path": str(video_metadata.filename),
-            "md5sum": video_metadata.md5sum,
+            "sequence_md5sum": video_metadata.md5sum,
         }
 
-        session_key = uploader._session_key(
-            video_metadata.md5sum, upload_api_v4.ClusterFileType.CAMM
-        )
-
         try:
             with video_metadata.filename.open("rb") as src_fp:
                 # Build the mp4 stream with the CAMM samples
@@ -524,12 +506,15 @@ def _gen_upload_videos(
                 )
 
                 # Upload the mp4 stream
-                cluster_id = mly_uploader.upload_stream(
+                file_handle = mly_uploader.upload_stream(
                     T.cast(T.IO[bytes], camm_fp),
-                    upload_api_v4.ClusterFileType.CAMM,
-                    session_key,
                     progress=T.cast(T.Dict[str, T.Any], progress),
                 )
+                cluster_id = mly_uploader.finish_upload(
+                    file_handle,
+                    api_v4.ClusterFileType.CAMM,
+                    progress=T.cast(T.Dict[str, T.Any], progress),
+                )
         except Exception as ex:
             yield video_metadata, uploader.UploadResult(error=ex)
         else:
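Note: the single upload_stream() call that used to return a cluster id is split into two phases here: upload_stream() now returns a file handle, and finish_upload() exchanges that handle plus a ClusterFileType (now imported from api_v4 rather than upload_api_v4) for the cluster id. A minimal sketch of the resulting protocol with stand-in types (FakeUploader is hypothetical; the real Uploader talks to the Mapillary API):

    import enum
    import io
    import typing as T
    import uuid

    class ClusterFileType(enum.Enum):
        CAMM = "camm"

    class FakeUploader:
        def upload_stream(self, fp: T.IO[bytes], progress=None) -> str:
            data = fp.read()  # phase 1: send the bytes somewhere
            return f"handle-{len(data)}"  # opaque handle, not a cluster id

        def finish_upload(
            self, file_handle: str, cluster_filetype: ClusterFileType, progress=None
        ) -> str:
            # phase 2: only now is the cluster created and its id returned
            return str(uuid.uuid4())

    mly = FakeUploader()
    handle = mly.upload_stream(io.BytesIO(b"camm bytes"))
    print(mly.finish_upload(handle, ClusterFileType.CAMM))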
@@ -630,100 +615,94 @@ def _continue_or_fail(ex: Exception) -> Exception:
     raise ex
 
 
-def upload(
-    import_path: Path | T.Sequence[Path],
-    user_items: types.UserItem,
-    desc_path: str | None = None,
-    _metadatas_from_process: T.Sequence[types.MetadataOrError] | None = None,
-    dry_run=False,
-    skip_subfolders=False,
-) -> None:
-    import_paths = _normalize_import_paths(import_path)
-
-    metadatas = _load_descs(_metadatas_from_process, desc_path, import_paths)
+def _gen_upload_zipfiles(
+    mly_uploader: uploader.Uploader, zip_paths: T.Sequence[Path]
+) -> T.Generator[tuple[Path, uploader.UploadResult], None, None]:
+    for idx, zip_path in enumerate(zip_paths):
+        progress: uploader.SequenceProgress = {
+            "total_sequence_count": len(zip_paths),
+            "sequence_idx": idx,
+            "import_path": str(zip_path),
+            "file_type": types.FileType.ZIP.value,
+            "sequence_md5sum": "",  # Placeholder, will be set in upload_zipfile
+        }
+        try:
+            cluster_id = uploader.ZipImageSequence.upload_zipfile(
+                mly_uploader, zip_path, progress=T.cast(T.Dict[str, T.Any], progress)
+            )
+        except Exception as ex:
+            yield zip_path, uploader.UploadResult(error=ex)
+        else:
+            yield zip_path, uploader.UploadResult(result=cluster_id)
 
-    jsonschema.validate(instance=user_items, schema=types.UserItemSchema)
 
-    # Setup the emitter -- the order matters here
+def _load_descs(
+    _metadatas_from_process: T.Sequence[types.MetadataOrError] | None,
+    import_paths: T.Sequence[Path],
+    desc_path: str | None,
+) -> list[types.Metadata]:
+    metadatas: list[types.Metadata]
 
-    emitter = uploader.EventEmitter()
+    if _metadatas_from_process is not None:
+        metadatas, _ = types.separate_errors(_metadatas_from_process)
+    else:
+        metadatas = _load_valid_metadatas_from_desc_path(import_paths, desc_path)
 
-    # When dry_run mode is on, we disable history by default.
-    # But we need dry_run for tests, so we added MAPILLARY__ENABLE_UPLOAD_HISTORY_FOR_DRY_RUN
-    # and when it is on, we enable history regardless of dry_run
-    enable_history = constants.MAPILLARY_UPLOAD_HISTORY_PATH and (
-        not dry_run or constants.MAPILLARY__ENABLE_UPLOAD_HISTORY_FOR_DRY_RUN
-    )
+    # Make sure all metadatas have sequence uuid assigned
+    # It is used to find the right sequence when writing upload history
+    missing_sequence_uuid = str(uuid.uuid4())
+    for metadata in metadatas:
+        if isinstance(metadata, types.ImageMetadata):
+            if metadata.MAPSequenceUUID is None:
+                metadata.MAPSequenceUUID = missing_sequence_uuid
 
-    # Put it first one to check duplications first
-    if enable_history:
-        upload_run_params: JSONDict = {
-            # Null if multiple paths provided
-            "import_path": str(import_path) if isinstance(import_path, Path) else None,
-            "organization_key": user_items.get("MAPOrganizationKey"),
-            "user_key": user_items.get("MAPSettingsUserKey"),
-            "version": VERSION,
-            "run_at": time.time(),
-        }
-        _setup_history(emitter, upload_run_params, metadatas)
+    for metadata in metadatas:
+        assert isinstance(metadata, (types.ImageMetadata, types.VideoMetadata))
+        if isinstance(metadata, types.ImageMetadata):
+            assert metadata.MAPSequenceUUID is not None
 
-    # Set up tdqm
-    _setup_tdqm(emitter)
+    return metadatas
 
-    # Now stats is empty but it will collect during ALL uploads
-    stats = _setup_api_stats(emitter)
 
-    # Send the progress via IPC, and log the progress in debug mode
-    _setup_ipc(emitter)
+def _load_valid_metadatas_from_desc_path(
+    import_paths: T.Sequence[Path], desc_path: str | None
+) -> list[types.Metadata]:
+    if desc_path is None:
+        desc_path = _find_desc_path(import_paths)
 
-    mly_uploader = uploader.Uploader(user_items, emitter=emitter, dry_run=dry_run)
+    if desc_path == "-":
+        try:
+            metadatas = DescriptionJSONSerializer.deserialize_stream(sys.stdin.buffer)
+        except json.JSONDecodeError as ex:
+            raise exceptions.MapillaryInvalidDescriptionFile(
+                f"Invalid JSON stream from {desc_path}: {ex}"
+            ) from ex
 
-    results = _gen_upload_everything(
-        mly_uploader, metadatas, import_paths, skip_subfolders
-    )
+    else:
+        if not os.path.isfile(desc_path):
+            raise exceptions.MapillaryFileNotFoundError(
+                f"Description file not found: {desc_path}"
+            )
+        with open(desc_path, "rb") as fp:
+            try:
+                metadatas = DescriptionJSONSerializer.deserialize_stream(fp)
+            except json.JSONDecodeError as ex:
+                raise exceptions.MapillaryInvalidDescriptionFile(
+                    f"Invalid JSON stream from {desc_path}: {ex}"
+                ) from ex
 
-    upload_successes = 0
-    upload_errors: list[Exception] = []
+    return metadatas
 
-    # The real upload happens sequentially here
-    try:
-        for _, result in results:
-            if result.error is not None:
-                upload_errors.append(_continue_or_fail(result.error))
-            else:
-                upload_successes += 1
 
-    except Exception as ex:
-        # Fatal error: log and raise
-        if not dry_run:
-            _api_logging_failed(_summarize(stats), ex)
-        raise ex
+def _find_desc_path(import_paths: T.Sequence[Path]) -> str:
+    if len(import_paths) == 1 and import_paths[0].is_dir():
+        return str(import_paths[0].joinpath(constants.IMAGE_DESCRIPTION_FILENAME))
 
+    if 1 < len(import_paths):
+        raise exceptions.MapillaryBadParameterError(
+            "The description path must be specified (with --desc_path) when uploading multiple paths",
+        )
     else:
-        if not dry_run:
-            _api_logging_finished(_summarize(stats))
-
-    finally:
-        # We collected stats after every upload is finished
-        assert upload_successes == len(stats)
-        _show_upload_summary(stats, upload_errors)
-
-
-def _gen_upload_zipfiles(
-    mly_uploader: uploader.Uploader,
-    zip_paths: T.Sequence[Path],
-) -> T.Generator[tuple[Path, uploader.UploadResult], None, None]:
-    for idx, zip_path in enumerate(zip_paths):
-        progress: uploader.SequenceProgress = {
-            "total_sequence_count": len(zip_paths),
-            "sequence_idx": idx,
-            "import_path": str(zip_path),
-        }
-        try:
-            cluster_id = uploader.ZipImageSequence.prepare_zipfile_and_upload(
-                zip_path, mly_uploader, progress=T.cast(T.Dict[str, T.Any], progress)
-            )
-        except Exception as ex:
-            yield zip_path, uploader.UploadResult(error=ex)
-        else:
-            yield zip_path, uploader.UploadResult(result=cluster_id)
+        raise exceptions.MapillaryBadParameterError(
+            "The description path must be specified (with --desc_path) when uploading a single file",
+        )
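Note: description loading now goes through DescriptionJSONSerializer.deserialize_stream(), which takes a binary stream (sys.stdin.buffer, or a file opened in "rb") in place of the json.load-plus-manual-validation path deleted above; the validation presumably moves inside the serializer. A usage sketch of the convention the new helpers implement (the desc path is illustrative):

    import sys

    from mapillary_tools.serializer.description import DescriptionJSONSerializer

    desc_path = "fixtures/images/mapillary_image_description.json"

    if desc_path == "-":
        # "-" keeps the old stdin convention, but now reads the binary buffer
        metadatas = DescriptionJSONSerializer.deserialize_stream(sys.stdin.buffer)
    else:
        with open(desc_path, "rb") as fp:  # binary mode, per the new code
            metadatas = DescriptionJSONSerializer.deserialize_stream(fp)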