nominal-1.109.0-py3-none-any.whl → nominal-1.111.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +31 -0
- nominal/core/_checklist_types.py +48 -0
- nominal/core/_clientsbunch.py +0 -3
- nominal/core/_utils/api_tools.py +16 -2
- nominal/core/_video_types.py +16 -0
- nominal/core/asset.py +34 -18
- nominal/core/bounds.py +8 -1
- nominal/core/channel.py +0 -15
- nominal/core/checklist.py +11 -25
- nominal/core/client.py +25 -29
- nominal/core/data_review.py +32 -11
- nominal/core/dataset.py +41 -2
- nominal/core/dataset_file.py +6 -0
- nominal/core/datasource.py +0 -3
- nominal/core/run.py +25 -11
- nominal/core/streaming_checklist.py +25 -0
- nominal/core/video.py +71 -13
- nominal/core/video_file.py +62 -2
- nominal/experimental/migration/migration_utils.py +343 -42
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/METADATA +2 -2
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/RECORD +24 -21
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/WHEEL +0 -0
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/entry_points.txt +0 -0
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/licenses/LICENSE +0 -0
nominal/experimental/migration/migration_utils.py

@@ -4,7 +4,17 @@ import re
 import uuid
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import
+from typing import (
+    Any,
+    BinaryIO,
+    Iterable,
+    Mapping,
+    Sequence,
+    TypeVar,
+    Union,
+    cast,
+    overload,
+)

 import requests
 from conjure_python_client import ConjureBeanType, ConjureEnumType, ConjureUnionType
@@ -25,7 +35,10 @@ from nominal.core import (
 from nominal.core._event_types import EventType, SearchEventOriginType
 from nominal.core._utils.api_tools import Link, LinkDict
 from nominal.core.attachment import Attachment
+from nominal.core.filetype import FileTypes
 from nominal.core.run import Run
+from nominal.core.video import Video
+from nominal.core.video_file import VideoFile
 from nominal.experimental.dataset_utils import create_dataset_with_uuid
 from nominal.experimental.migration.migration_data_config import MigrationDatasetConfig
 from nominal.experimental.migration.migration_resources import MigrationResources
@@ -38,6 +51,56 @@ logger = logging.getLogger(__name__)

 ConjureType = Union[ConjureBeanType, ConjureUnionType, ConjureEnumType]

+
+def _install_migration_file_logger(
+    log_path: str | Path | None = None,
+    *,
+    logger: logging.Logger | None = None,
+    level: int = logging.INFO,
+    formatter: logging.Formatter | None = None,
+    mode: str = "a",
+) -> logging.FileHandler:
+    """Install a file handler that only writes log records with extra={"to_file": True}.
+
+    Args:
+        log_path: File path to write filtered logs to. If None (or a directory), a timestamped
+            file named "migration_utils_output_YYYY-MM-DD-HH-MM-SS.txt" is created.
+        logger: Logger to attach the handler to. Defaults to the root logger.
+        level: Minimum log level to write to the file.
+        formatter: Optional formatter to apply to the file handler.
+        mode: File open mode for the handler.
+
+    Returns:
+        The attached FileHandler instance.
+    """
+    if logger is None:
+        logger = logging.getLogger()
+
+    if log_path is None:
+        log_path_obj = Path.cwd()
+    else:
+        log_path_obj = Path(log_path)
+
+    if log_path_obj.is_dir():
+        timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+        log_path_obj = log_path_obj / f"migration_utils_output_{timestamp}.txt"
+
+    handler = logging.FileHandler(log_path_obj, mode=mode, encoding="utf-8")
+    handler.setLevel(level)
+    if formatter is not None:
+        handler.setFormatter(formatter)
+
+    filter_obj = logging.Filter()
+
+    def _filter(record: logging.LogRecord) -> bool:
+        return bool(getattr(record, "to_file", False))
+
+    filter_obj.filter = _filter  # type: ignore[method-assign]
+    handler.addFilter(filter_obj)
+    logger.addHandler(handler)
+    return handler
+
+
 # Regex pattern to match strings that have a UUID format with a prefix.
 UUID_PATTERN = re.compile(r"^(.*)([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$")

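The handler installed above only persists records that opt in via `extra={"to_file": True}`; everything else still flows to the normal console or root handlers. A minimal sketch of exercising the (internal) helper on its own, assuming the import path from this diff; the messages and RIDs are illustrative:

```python
import logging

from nominal.experimental.migration.migration_utils import _install_migration_file_logger

logging.basicConfig(level=logging.INFO)  # make sure INFO records are emitted at all
logger = logging.getLogger(__name__)

# With no path, a timestamped migration_utils_output_*.txt is created in the current directory.
handler = _install_migration_file_logger()

logger.info("DATASET: Old RID: %s, New RID: %s", "old-rid", "new-rid", extra={"to_file": True})
logger.info("progress message for the console only")  # no to_file flag, so the file skips it

handler.close()
logging.getLogger().removeHandler(handler)
```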
@@ -189,7 +252,10 @@ def _replace_uuids_in_obj(obj: Any, mapping: dict[str, str]) -> Any:
         elif isinstance(value, str):
             parsed_value, was_json = _convert_if_json(value)
             if was_json:
-                new_obj[key] = json.dumps(
+                new_obj[key] = json.dumps(
+                    _replace_uuids_in_obj(parsed_value, mapping),
+                    separators=(",", ":"),
+                )
             else:
                 new_obj[key] = _replace_uuids_in_obj(value, mapping)
         else:
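For context on the `separators=(",", ":")` argument added above: it keeps re-serialized embedded JSON compact, whereas the default separators insert a space after every comma and colon. A standalone illustration, independent of the Nominal code:

```python
import json

payload = {"ref": "asset-123e4567-e89b-12d3-a456-426614174000", "values": [1, 2, 3]}

print(json.dumps(payload))
# {"ref": "asset-123e4567-e89b-12d3-a456-426614174000", "values": [1, 2, 3]}

print(json.dumps(payload, separators=(",", ":")))
# {"ref":"asset-123e4567-e89b-12d3-a456-426614174000","values":[1,2,3]}
```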
@@ -212,7 +278,9 @@ def _clone_conjure_objects_with_new_uuids(


 @overload
-def _clone_conjure_objects_with_new_uuids(
+def _clone_conjure_objects_with_new_uuids(
+    objs: list[ConjureType],
+) -> list[ConjureType]: ...


 def _clone_conjure_objects_with_new_uuids(
@@ -303,7 +371,10 @@ def copy_workbook_template_from(
         "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
     }
     logger.debug(
-        "Cloning workbook template: %s (rid: %s)",
+        "Cloning workbook template: %s (rid: %s)",
+        source_template.title,
+        source_template.rid,
+        extra=log_extras,
     )
     raw_source_template = source_template._clients.template.get(
         source_template._clients.auth_header, source_template.rid
@@ -347,9 +418,161 @@ def copy_workbook_template_from(
         source_template.rid,
         extra=log_extras,
     )
+    logger.info(
+        "WORKBOOK_TEMPLATE: Old RID: %s, New RID: %s",
+        source_template.rid,
+        new_workbook_template.rid,
+        extra={"to_file": True},
+    )
     return new_workbook_template


+def copy_video_file_to_video_dataset(
+    source_video_file: VideoFile,
+    destination_video_dataset: Video,
+) -> VideoFile | None:
+    """Copy a video dataset file from the source to the destination dataset.
+
+    This method is specifically designed to handle video files, which may require special handling
+    due to their size and streaming nature. It retrieves the video file from the source dataset,
+    streams it, and uploads it to the destination dataset while maintaining all associated metadata.
+
+    Args:
+        source_video_file: The source VideoFile to copy. Must be a video file with S3 handle.
+        destination_video_dataset: The Video dataset to create the copied file in.
+
+    Returns:
+        The dataset file in the new dataset.
+    """
+    log_extras = {"destination_client_workspace": destination_video_dataset._clients.workspace_rid}
+    logger.debug("Copying video file: %s", source_video_file.name, extra=log_extras)
+
+    (mcap_video_details, timestamp_options) = source_video_file._get_file_ingest_options()
+
+    old_file_uri = source_video_file._clients.catalog.get_video_file_uri(
+        source_video_file._clients.auth_header, source_video_file.rid
+    ).uri
+
+    response = requests.get(old_file_uri, stream=True)
+    response.raise_for_status()
+
+    file_name = source_video_file.name
+    file_stem = Path(file_name).stem
+    if timestamp_options is not None:
+        new_file = destination_video_dataset.add_from_io(
+            video=cast(BinaryIO, response.raw),
+            name=file_stem,
+            start=timestamp_options.starting_timestamp,
+            description=source_video_file.description,
+        )
+        new_file.update(
+            starting_timestamp=timestamp_options.starting_timestamp,
+            ending_timestamp=timestamp_options.ending_timestamp,
+        )
+    elif mcap_video_details is not None:
+        new_file = destination_video_dataset.add_mcap_from_io(
+            mcap=cast(BinaryIO, response.raw),
+            name=file_stem,
+            topic=mcap_video_details.mcap_channel_locator_topic,
+            description=source_video_file.description,
+            file_type=FileTypes.MCAP,
+        )
+    else:
+        raise ValueError(
+            "Unsupported video file ingest options for copying video file. "
+            "Expected either _mcap_video_details or _timestamp_options to be set."
+        )
+    logger.debug(
+        "New video file created %s in video dataset: %s (rid: %s)",
+        new_file.name,
+        destination_video_dataset.name,
+        destination_video_dataset.rid,
+    )
+    logger.info(
+        "VIDEO_FILE: Old RID: %s, New RID: %s",
+        source_video_file.rid,
+        new_file.rid,
+        extra={"to_file": True},
+    )
+    return new_file
+
+
+def clone_video(source_video: Video, destination_client: NominalClient) -> Video:
+    """Clones a video, maintaining all properties and files.
+
+    Args:
+        source_video (Video): The video to copy from.
+        destination_client (NominalClient): The destination client.
+
+    Returns:
+        The cloned video.
+    """
+    return copy_video_from(
+        source_video=source_video,
+        destination_client=destination_client,
+        include_files=True,
+    )
+
+
+def copy_video_from(
+    source_video: Video,
+    destination_client: NominalClient,
+    *,
+    new_video_name: str | None = None,
+    new_video_description: str | None = None,
+    new_video_properties: dict[str, Any] | None = None,
+    new_video_labels: Sequence[str] | None = None,
+    include_files: bool = False,
+) -> Video:
+    """Copy a video from the source to the destination client.
+
+    Args:
+        source_video: The source Video to copy.
+        destination_client: The NominalClient to create the copied video in.
+        new_video_name: Optional new name for the copied video. If not provided, the original name is used.
+        new_video_description: Optional new description for the copied video.
+            If not provided, the original description is used.
+        new_video_properties: Optional new properties for the copied video. If not provided, the original
+            properties are used.
+        new_video_labels: Optional new labels for the copied video. If not provided, the original labels are used.
+        include_files: Whether to include files in the copied video.
+
+    Returns:
+        The newly created Video in the destination client.
+    """
+    log_extras = {
+        "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
+    }
+    logger.debug(
+        "Copying dataset %s (rid: %s)",
+        source_video.name,
+        source_video.rid,
+        extra=log_extras,
+    )
+    new_video = destination_client.create_video(
+        name=new_video_name if new_video_name is not None else source_video.name,
+        description=new_video_description if new_video_description is not None else source_video.description,
+        properties=new_video_properties if new_video_properties is not None else source_video.properties,
+        labels=new_video_labels if new_video_labels is not None else source_video.labels,
+    )
+    if include_files:
+        for source_file in source_video.list_files():
+            copy_video_file_to_video_dataset(source_file, new_video)
+    logger.debug(
+        "New video created: %s (rid: %s)",
+        new_video.name,
+        new_video.rid,
+        extra=log_extras,
+    )
+    logger.info(
+        "VIDEO: Old RID: %s, New RID: %s",
+        source_video.rid,
+        new_video.rid,
+        extra={"to_file": True},
+    )
+    return new_video
+
+
 def copy_file_to_dataset(
     source_file: DatasetFile,
     destination_dataset: Dataset,
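For orientation, a hedged usage sketch of the new video helpers. It assumes `source_video` (a `nominal.core.video.Video`) and `destination_client` (a `NominalClient` for the target workspace) have already been obtained by whatever lookup the migration script uses; only the two calls shown are defined in this diff:

```python
from nominal.experimental.migration.migration_utils import clone_video, copy_video_from

# clone_video copies name, description, properties, labels, and every VideoFile.
exact_copy = clone_video(source_video, destination_client)

# copy_video_from allows selective overrides; include_files streams each file
# through copy_video_file_to_video_dataset.
renamed_copy = copy_video_from(
    source_video,
    destination_client,
    new_video_name=f"{source_video.name} (copy)",  # illustrative override
    include_files=True,
)
```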
@@ -397,6 +620,12 @@ def copy_file_to_dataset(
             destination_dataset.name,
             destination_dataset.rid,
         )
+        logger.info(
+            "DATASET_FILE: Old RID: %s, New RID: %s",
+            source_file.id,
+            new_file.id,
+            extra={"to_file": True},
+        )
         return new_file
     else:  # Because these fields are optional, need to check for None. We shouldn't ever run into this.
         raise ValueError("Unsupported file handle type or missing timestamp information.")
@@ -503,7 +732,26 @@ def copy_dataset_from(
     if include_files:
         for source_file in source_dataset.list_files():
             copy_file_to_dataset(source_file, new_dataset)
-
+
+    # Copy bounds from source dataset if they exist
+    if source_dataset.bounds is not None:
+        new_dataset = new_dataset.update_bounds(
+            start=source_dataset.bounds.start,
+            end=source_dataset.bounds.end,
+        )
+
+    logger.debug(
+        "New dataset created: %s (rid: %s)",
+        new_dataset.name,
+        new_dataset.rid,
+        extra=log_extras,
+    )
+    logger.info(
+        "DATASET: Old RID: %s, New RID: %s",
+        source_dataset.rid,
+        new_dataset.rid,
+        extra={"to_file": True},
+    )
     return new_dataset


@@ -569,7 +817,18 @@ def copy_event_from(
         properties=new_properties or source_event.properties,
         labels=new_labels or source_event.labels,
     )
-    logger.debug(
+    logger.debug(
+        "New event created: %s (rid: %s)",
+        new_event.name,
+        new_event.rid,
+        extra=log_extras,
+    )
+    logger.info(
+        "EVENT: Old RID: %s, New RID: %s",
+        source_event.rid,
+        new_event.rid,
+        extra={"to_file": True},
+    )
     return new_event


@@ -627,6 +886,7 @@ def copy_run_from(
         source_run.rid,
         extra=log_extras,
     )
+
     new_run = destination_client.create_run(
         name=new_name or source_run.name,
         start=new_start or source_run.start,
@@ -639,6 +899,12 @@ def copy_run_from(
         attachments=new_attachments or source_run.list_attachments(),
     )
     logger.debug("New run created: %s (rid: %s)", new_run.name, new_run.rid, extra=log_extras)
+    logger.info(
+        "RUN: Old RID: %s, New RID: %s",
+        source_run.rid,
+        new_run.rid,
+        extra={"to_file": True},
+    )
     return new_run


@@ -661,6 +927,7 @@ def clone_asset(
         dataset_config=MigrationDatasetConfig(preserve_dataset_uuid=True, include_dataset_files=True),
         include_events=True,
         include_runs=True,
+        include_video=True,
     )


@@ -675,6 +942,7 @@ def copy_asset_from(
     dataset_config: MigrationDatasetConfig | None = None,
     include_events: bool = False,
     include_runs: bool = False,
+    include_video: bool = False,
 ) -> Asset:
     """Copy an asset from the source to the destination client.

@@ -688,6 +956,7 @@ def copy_asset_from(
         dataset_config: Configuration for dataset migration.
         include_events: Whether to include events in the copied dataset.
         include_runs: Whether to include runs in the copied asset.
+        include_video: Whether to include video in the copied asset.

     Returns:
         The new asset created.
@@ -695,13 +964,20 @@ def copy_asset_from(
     log_extras = {
         "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
     }
-
+
+    logger.debug(
+        "Copying asset %s (rid: %s)",
+        source_asset.name,
+        source_asset.rid,
+        extra=log_extras,
+    )
     new_asset = destination_client.create_asset(
         name=new_asset_name if new_asset_name is not None else source_asset.name,
         description=new_asset_description if new_asset_description is not None else source_asset.description,
         properties=new_asset_properties if new_asset_properties is not None else source_asset.properties,
         labels=new_asset_labels if new_asset_labels is not None else source_asset.labels,
     )
+
     if dataset_config is not None:
         source_datasets = source_asset.list_datasets()
         for data_scope, source_dataset in source_datasets:
@@ -723,7 +999,25 @@ def copy_asset_from(
         for source_run in source_runs:
             copy_run_from(source_run, destination_client, new_assets=[new_asset])

+    if include_video:
+        for data_scope, video_dataset in source_asset.list_videos():
+            new_video_dataset = destination_client.create_video(
+                name=video_dataset.name,
+                description=video_dataset.description,
+                properties=video_dataset.properties,
+                labels=video_dataset.labels,
+            )
+            new_asset.add_video(data_scope, new_video_dataset)
+            for source_video_file in video_dataset.list_files():
+                copy_video_file_to_video_dataset(source_video_file, new_video_dataset)
+
     logger.debug("New asset created: %s (rid: %s)", new_asset, new_asset.rid, extra=log_extras)
+    logger.info(
+        "ASSET: Old RID: %s, New RID: %s",
+        source_asset.rid,
+        new_asset.rid,
+        extra={"to_file": True},
+    )
     return new_asset


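The `include_video` flag threads through `copy_asset_from` as shown above, and `clone_asset` now passes it by default. A sketch of an explicit call, again assuming `source_asset` (a `nominal.core.asset.Asset`) and `destination_client` already exist:

```python
from nominal.experimental.migration.migration_data_config import MigrationDatasetConfig
from nominal.experimental.migration.migration_utils import copy_asset_from

new_asset = copy_asset_from(
    source_asset,
    destination_client,
    dataset_config=MigrationDatasetConfig(preserve_dataset_uuid=True, include_dataset_files=True),
    include_events=True,
    include_runs=True,
    include_video=True,  # new in this release: video data scopes and their files are copied too
)
```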
@@ -743,40 +1037,47 @@ def copy_resources_to_destination_client(
     Returns:
         All of the created resources.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            new_template = clone_workbook_template(source_workbook_template, destination_client)
-            new_templates.append(new_template)
-            new_workbook = new_template.create_workbook(
-                title=new_template.title, description=new_template.description, asset=new_asset
-            )
-            logger.debug(
-                "Created new workbook %s (rid: %s) from template %s (rid: %s)",
-                new_workbook.title,
-                new_workbook.rid,
-                new_template.title,
-                new_template.rid,
-                extra=log_extras,
+    file_handler = _install_migration_file_logger()
+    try:
+        log_extras = {
+            "destination_client_workspace": destination_client.get_workspace(
+                destination_client._clients.workspace_rid
+            ).rid,
+        }
+
+        new_assets = []
+        new_templates = []
+        new_workbooks = []
+
+        new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
+        for source_asset in migration_resources.source_assets:
+            new_asset = copy_asset_from(
+                source_asset.asset,
+                destination_client,
+                dataset_config=dataset_config,
+                include_events=True,
+                include_runs=True,
+                include_video=True,
             )
-
-
+            new_assets.append(new_asset)
+            new_data_scopes_and_datasets.extend(new_asset.list_datasets())
+
+            for source_workbook_template in source_asset.source_workbook_templates:
+                new_template = clone_workbook_template(source_workbook_template, destination_client)
+                new_templates.append(new_template)
+                new_workbook = new_template.create_workbook(
+                    title=new_template.title, description=new_template.description, asset=new_asset
+                )
+                logger.debug(
+                    "Created new workbook %s (rid: %s) from template %s (rid: %s)",
+                    new_workbook.title,
+                    new_workbook.rid,
+                    new_template.title,
+                    new_template.rid,
+                    extra=log_extras,
+                )
+                new_workbooks.append(new_workbook)
+    finally:
+        file_handler.close()
+        logger.removeHandler(file_handler)
     return (new_data_scopes_and_datasets, new_assets, new_templates, new_workbooks)
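Because `copy_resources_to_destination_client` now wraps the whole migration in `_install_migration_file_logger`, a run leaves behind an audit file of `<TYPE>: Old RID: ..., New RID: ...` lines (one per copied asset, dataset, file, run, event, video, and template; with no formatter set, the handler writes just the rendered message). A hedged post-processing sketch; the parsing helper is illustrative and not part of the package, and the file name is only the timestamped default:

```python
from pathlib import Path


def read_rid_mapping(log_path: str) -> dict[str, str]:
    """Illustrative helper: build {old_rid: new_rid} from the migration audit log."""
    mapping: dict[str, str] = {}
    for line in Path(log_path).read_text(encoding="utf-8").splitlines():
        if "Old RID:" in line and "New RID:" in line:
            old_part, new_part = line.split("New RID:", 1)
            old_rid = old_part.split("Old RID:", 1)[1].strip().rstrip(",")
            mapping[old_rid] = new_part.strip()
    return mapping


rid_map = read_rid_mapping("migration_utils_output_2025-01-01-12-00-00.txt")  # hypothetical file name
```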
{nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nominal
-Version: 1.
+Version: 1.111.0
 Summary: Automate Nominal workflows in Python
 Project-URL: Homepage, https://nominal.io
 Project-URL: Documentation, https://docs.nominal.io
@@ -20,7 +20,7 @@ Requires-Dist: cachetools>=6.1.0
 Requires-Dist: click<9,>=8
 Requires-Dist: conjure-python-client<4,>=3.1.0
 Requires-Dist: ffmpeg-python>=0.2.0
-Requires-Dist: nominal-api==0.
+Requires-Dist: nominal-api==0.1079.0
 Requires-Dist: nominal-streaming==0.5.8; platform_python_implementation == 'CPython' and python_version >= '3.10' and ((sys_platform == 'win32' and platform_machine == 'AMD64') or (sys_platform == 'darwin' and platform_machine == 'arm64') or (sys_platform == 'linux' and (platform_machine == 'x86_64' or platform_machine == 'armv7l')))
 Requires-Dist: openpyxl>=0.0.0
 Requires-Dist: pandas>=0.0.0
{nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/RECORD

@@ -1,4 +1,4 @@
-CHANGELOG.md,sha256=
+CHANGELOG.md,sha256=kMJRSF1lP8evZvrdsVNE9E-a4A9JhWBGMFhTl7sbON0,91733
 LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
 README.md,sha256=KKe0dxh_pHXCtB7I9G4qWGQYvot_BZU8yW6MJyuyUHM,311
 nominal/__init__.py,sha256=rbraORnXUrNn1hywLXM0XwSQCd9UmQt20PDYlsBalfE,2167
@@ -28,32 +28,35 @@ nominal/cli/util/verify_connection.py,sha256=KU17ejaDfKBLmLiZ3MZSVLyfrqNE7c6mFBv
 nominal/config/__init__.py,sha256=wV8cq8X3J4NTJ5H_uR5THaMT_NQpWQO5qCUGEb-rPnM,3157
 nominal/config/_config.py,sha256=yKq_H1iYJDoxRfLz2iXLbbVdoL0MTEY0FS4eVL12w0g,2004
 nominal/core/__init__.py,sha256=1MiCC44cxHYFofP4hf2fz4EIkepK-OAhDzpPFIzHbWw,2422
-nominal/core/
+nominal/core/_checklist_types.py,sha256=YcjvuUbQDIOYqqM5H1Eto9ws9Ivm4cPWEaeEF2Uwn1o,1361
+nominal/core/_clientsbunch.py,sha256=k8XeVH5c_gv037RlZI44vPI8XaWvDeF5DRtFRKHDylE,8447
 nominal/core/_constants.py,sha256=SrxgaSqAEB1MvTSrorgGam3eO29iCmRr6VIdajxX3gI,56
 nominal/core/_event_types.py,sha256=Cq_8x-zv_5EDvRo9UTbaOpenAy92bTfQxlsEuHPOhtE,3706
 nominal/core/_types.py,sha256=FktMmcQ5_rD2rbXv8_p-WISzSo8T2NtO-exsLm-iadU,122
-nominal/core/
+nominal/core/_video_types.py,sha256=Cdl0sZxX3cyYtCXzsnnLWjK38hHp3_orMe6oiUU_dyc,465
+nominal/core/asset.py,sha256=S41KS_c14tFcxFLJzU3bnt958KpMSI_U524QcMCiSEE,23609
 nominal/core/attachment.py,sha256=yOtDUdkLY5MT_Rk9kUlr1yupIJN7a5pt5sJWx4RLQV8,4355
-nominal/core/bounds.py,sha256=
-nominal/core/channel.py,sha256=
-nominal/core/checklist.py,sha256=
-nominal/core/client.py,sha256=
+nominal/core/bounds.py,sha256=mYUWBtpRMltqPVmHD3Bu282rtwhw1oqS3VJxpcCC0H4,882
+nominal/core/channel.py,sha256=oSZUAXRH9k9YPXZJ3LdgKEeFBmLjlVdgK3zdfQqRe5o,18991
+nominal/core/checklist.py,sha256=TXEm9qNYCG6lU5NB5P3RAe-XmXdj1Tcsdbx_c5_spXI,6663
+nominal/core/client.py,sha256=RjMQCU8DmvHcp7lypVCFLY54caoTXu76EwN-oxaFjsw,68091
 nominal/core/connection.py,sha256=LYllr3a1H2xp8-i4MaX1M7yK8X-HnwuIkciyK9XgLtQ,5175
 nominal/core/containerized_extractors.py,sha256=fUz3-NHoNWYKqOCD15gLwGXDKVfdsW-x_kpXnkOI3BE,10224
-nominal/core/data_review.py,sha256=
-nominal/core/dataset.py,sha256=
-nominal/core/dataset_file.py,sha256=
-nominal/core/datasource.py,sha256=
+nominal/core/data_review.py,sha256=8pyoJiP-6KCSSB4NE_LKjp1JfenEigHTmEVdF1xF1bA,11674
+nominal/core/dataset.py,sha256=rSMB9CKw0OhCIxiPV4Jz84I4QLL71ZGLYZqD3GBfzVo,48674
+nominal/core/dataset_file.py,sha256=1cvEsf3IXGCOIr5kWIBBSwfHpZMAY-BEUEtewR6RjNc,16789
+nominal/core/datasource.py,sha256=lW9CqeA_4gNqeSjx0u-hyp0W1yeyWuMLqUXFhF5e0QM,18604
 nominal/core/event.py,sha256=8trZXyuAqRlKedgcqSgDIimXAAJBmEfDLyHkOOBwUC0,7762
 nominal/core/exceptions.py,sha256=GUpwXRgdYamLl6684FE8ttCRHkBx6WEhOZ3NPE-ybD4,2671
 nominal/core/filetype.py,sha256=R8goHGW4SP0iO6AoQiUil2tNVuDgaQoHclftRbw44oc,5558
 nominal/core/log.py,sha256=z3hI3CIEyMwpUSWjwBsJ6a3JNGzBbsmrVusSU6uI7CY,3885
-nominal/core/run.py,sha256=
+nominal/core/run.py,sha256=1mRMl4bfmhd0MUR-JkvgqqkJYU8_RDNtKX1Qh8xtNtY,18308
 nominal/core/secret.py,sha256=Ckq48m60i7rktxL9GY-nxHU5v8gHv9F1-JN7_MSf4bM,2863
+nominal/core/streaming_checklist.py,sha256=t7cilpW79hUQ86fJxiAr4Hocy9CdpLLP4azonjOi22o,844
 nominal/core/unit.py,sha256=Wa-Bvu0hD-nzxVaQJSnn5YqAfnhUd2kWw2SswXnbMHY,3161
 nominal/core/user.py,sha256=FV333TN4pQzcLh5b2CfxvBnnXyB1TrOP8Ppx1-XdaiE,481
-nominal/core/video.py,sha256=
-nominal/core/video_file.py,sha256=
+nominal/core/video.py,sha256=p5H46V-esDEEg-j6X0zKrX3_xe5yYA6PSM4MmW6z3a8,18126
+nominal/core/video_file.py,sha256=haq5Gf6V4HCP7iq-wObq5voGEx96ApJ2Ju3FPcTsv4U,8887
 nominal/core/workbook.py,sha256=lJo9ZaYm0TevAyIs239ZA-_1WUriTkj8i1lxvxH9TJw,8902
 nominal/core/workbook_template.py,sha256=PBgQjEDVVQdZMlVea99BbhHdAr_bawknSvNKhNtDAq0,7125
 nominal/core/workspace.py,sha256=_FmMu86xzIcxMt8_82oRSe3N4ss3law-rk0I0s8GMCQ,512
@@ -64,7 +67,7 @@ nominal/core/_stream/write_stream.py,sha256=Xd4VnWU9NANHu7hzknylv_d7qWoIiAOqzVtX
 nominal/core/_stream/write_stream_base.py,sha256=AxK3fAq3IBjNXZkxYFVXu3dGNWLCBhgknroMEyXqVJo,3787
 nominal/core/_utils/README.md,sha256=kWPQDc6kn-PjXFUsIH9u2nOA3RdGSXCOlxqeJSmUsPA,160
 nominal/core/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-nominal/core/_utils/api_tools.py,sha256=
+nominal/core/_utils/api_tools.py,sha256=rFL_YsdsEhxHi8v1boDM1NCk1olngRSJjYLUd-9cG-s,3539
 nominal/core/_utils/multipart.py,sha256=0dA2XcTHuOQIyS0139O8WZiCjwePaD1sYDUmTgmWG9w,10243
 nominal/core/_utils/multipart_downloader.py,sha256=16OJEPqxCwOnfjptYdrlwQVuSUQYoe9_iiW60ZSjWos,13859
 nominal/core/_utils/networking.py,sha256=n9ZqYtnpwPCjz9C-4eixsTkrhFh-DW6lknBJlHckHhg,8200
@@ -91,7 +94,7 @@ nominal/experimental/logging/rich_log_handler.py,sha256=8yz_VtxNgJg2oiesnXz2iXoB
 nominal/experimental/migration/__init__.py,sha256=E2IgWJLwJ5bN6jbl8k5nHECKFx5aT11jKAzVYcyXn3o,460
 nominal/experimental/migration/migration_data_config.py,sha256=sPwZjyLmL-_pHvDZvQspxrfW6yNZhEsQjDVwKA8IaXM,522
 nominal/experimental/migration/migration_resources.py,sha256=Tf_7kNBeSaY8z2fTF7DAxk-9q3a7F8xXFVvxI8tTc9c,415
-nominal/experimental/migration/migration_utils.py,sha256=
+nominal/experimental/migration/migration_utils.py,sha256=DxPKtPT1ZHTUdkMh6yt3YLK3A7C7eAaaVVI0JaEjKZo,42423
 nominal/experimental/rust_streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/experimental/rust_streaming/rust_write_stream.py,sha256=oQ6ixwm8ct8ZDc_qNB7AucDt8o5-_aBVlW2fFCQ_nmA,1541
 nominal/experimental/stream_v2/__init__.py,sha256=W39vK46pssx5sXvmsImMuJiEPs7iGtwrbYBI0bWnXCY,2313
@@ -110,8 +113,8 @@ nominal/thirdparty/polars/polars_export_handler.py,sha256=hGCSwXX9dC4MG01CmmjlTb
 nominal/thirdparty/tdms/__init__.py,sha256=6n2ImFr2Wiil6JM1P5Q7Mpr0VzLcnDkmup_ftNpPq-s,142
 nominal/thirdparty/tdms/_tdms.py,sha256=m4gxbpxB9MTLi2FuYvGlbUGSyDAZKFxbM3ia2x1wIz0,8746
 nominal/ts/__init__.py,sha256=hmd0ENvDhxRnzDKGLxIub6QG8LpcxCgcyAct029CaEs,21442
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
+nominal-1.111.0.dist-info/METADATA,sha256=Ysxt0asOhclp8ByoO1yzp3-gWL_WLoKEYHTps_ACIU8,2307
+nominal-1.111.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+nominal-1.111.0.dist-info/entry_points.txt,sha256=-mCLhxgg9R_lm5efT7vW9wuBH12izvY322R0a3TYxbE,66
+nominal-1.111.0.dist-info/licenses/LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
+nominal-1.111.0.dist-info/RECORD,,

File without changes: {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/WHEEL
File without changes: {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/entry_points.txt
File without changes: {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/licenses/LICENSE