nominal 1.110.0-py3-none-any.whl → 1.111.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +25 -0
- nominal/core/_clientsbunch.py +0 -3
- nominal/core/bounds.py +8 -1
- nominal/core/channel.py +0 -15
- nominal/core/dataset.py +41 -2
- nominal/core/datasource.py +0 -3
- nominal/experimental/migration/migration_utils.py +151 -36
- {nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/METADATA +1 -1
- {nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/RECORD +12 -12
- {nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/WHEEL +0 -0
- {nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/entry_points.txt +0 -0
- {nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/licenses/LICENSE +0 -0
CHANGELOG.md
CHANGED

@@ -1,5 +1,30 @@
 # Changelog
 
+## [1.111.1](https://github.com/nominal-io/nominal-client/compare/v1.111.0...v1.111.1) (2026-01-29)
+
+
+### Bug Fixes
+
+* update bounds on dataset ([#598](https://github.com/nominal-io/nominal-client/issues/598)) ([9d57d3a](https://github.com/nominal-io/nominal-client/commit/9d57d3ae515359dce3ae4c102ffe230e3bf91faf))
+
+## [1.111.0](https://github.com/nominal-io/nominal-client/compare/v1.110.0...v1.111.0) (2026-01-29)
+
+
+### Features
+
+* add RID mapping logging to migration utils ([#593](https://github.com/nominal-io/nominal-client/issues/593)) ([bf19a8b](https://github.com/nominal-io/nominal-client/commit/bf19a8b99f176d0d25586c9f134278a684cedcd2))
+* copy over bounds in clone dataset method ([#597](https://github.com/nominal-io/nominal-client/issues/597)) ([49f0cc1](https://github.com/nominal-io/nominal-client/commit/49f0cc119895768f48fd94a70719ad516bf51a94))
+
+
+### Bug Fixes
+
+* remove logical series service references ([#591](https://github.com/nominal-io/nominal-client/issues/591)) ([ec3ce3b](https://github.com/nominal-io/nominal-client/commit/ec3ce3bc6ddfad7a3e07cfc51f0681bbb719f952))
+
+
+### Documentation
+
+* update avro stream docstring with new schema types ([#594](https://github.com/nominal-io/nominal-client/issues/594)) ([690e5a3](https://github.com/nominal-io/nominal-client/commit/690e5a34dc782601df13d72034c452844fc644a4))
+
 ## [1.110.0](https://github.com/nominal-io/nominal-client/compare/v1.109.0...v1.110.0) (2026-01-23)
 
 
nominal/core/_clientsbunch.py
CHANGED

@@ -27,7 +27,6 @@ from nominal_api import (
     storage_datasource_api,
     storage_writer_api,
     timeseries_channelmetadata,
-    timeseries_logicalseries,
     timeseries_metadata,
     upload_api,
 )
@@ -124,7 +123,6 @@ class ClientsBunch:
     dataexport: scout_dataexport_api.DataExportService
     datasource: scout_datasource.DataSourceService
     ingest: ingest_api.IngestService
-    logical_series: timeseries_logicalseries.LogicalSeriesService
     run: scout.RunService
     units: scout.UnitsService
     upload: upload_api.UploadService
@@ -165,7 +163,6 @@ class ClientsBunch:
            dataexport=client_factory(scout_dataexport_api.DataExportService),
            datasource=client_factory(scout_datasource.DataSourceService),
            ingest=client_factory(ingest_api.IngestService),
-           logical_series=client_factory(timeseries_logicalseries.LogicalSeriesService),
            run=client_factory(scout.RunService),
            units=client_factory(scout.UnitsService),
            upload=client_factory(upload_api.UploadService),
nominal/core/bounds.py
CHANGED

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 
-from nominal_api import scout_catalog
+from nominal_api import datasource, scout_catalog
 from typing_extensions import Self
 
 from nominal.ts import (
@@ -22,3 +22,10 @@ class Bounds:
             start=_SecondsNanos.from_api(bounds.start).to_nanoseconds(),
             end=_SecondsNanos.from_api(bounds.end).to_nanoseconds(),
         )
+
+    def _to_conjure(self) -> scout_catalog.Bounds:
+        return scout_catalog.Bounds(
+            type=datasource.TimestampType.ABSOLUTE,
+            start=_SecondsNanos.from_nanoseconds(self.start).to_api(),
+            end=_SecondsNanos.from_nanoseconds(self.end).to_api(),
+        )
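The new `_to_conjure` is the inverse of the existing decoding path shown in the hunk above: `Bounds` holds integer nanoseconds since epoch and converts through `_SecondsNanos` at the API boundary. A minimal round-trip sketch, assuming the decoding classmethod whose body appears in the hunk is named `_from_conjure` (both are private SDK helpers, not public API):

from nominal.core.bounds import Bounds

# Bounds stores start/end as integer nanoseconds since the Unix epoch.
bounds = Bounds(start=1_700_000_000_000_000_000, end=1_700_000_060_000_000_000)

# Encode to the conjure wire type (a scout_catalog.Bounds with an ABSOLUTE
# timestamp type), then decode back; dataclass equality checks the round trip.
wire = bounds._to_conjure()
assert Bounds._from_conjure(wire) == bounds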
nominal/core/channel.py
CHANGED

@@ -15,7 +15,6 @@ from nominal_api import (
     storage_series_api,
     timeseries_channelmetadata,
     timeseries_channelmetadata_api,
-    timeseries_logicalseries_api,
 )
 from typing_extensions import Self
 
@@ -298,20 +297,6 @@ class Channel(RefreshableMixin[timeseries_channelmetadata_api.ChannelMetadata]):
             _clients=clients,
         )
 
-    @classmethod
-    def _from_conjure_logicalseries_api(
-        cls, clients: _Clients, series: timeseries_logicalseries_api.LogicalSeries
-    ) -> Self:
-        channel_data_type = ChannelDataType._from_conjure(series.series_data_type) if series.series_data_type else None
-        return cls(
-            name=series.channel,
-            data_source=series.data_source_rid,
-            unit=series.unit,
-            description=series.description,
-            data_type=channel_data_type,
-            _clients=clients,
-        )
-
     @classmethod
     def _from_channel_metadata_api(
         cls, clients: _Clients, channel: timeseries_channelmetadata_api.ChannelMetadata
nominal/core/dataset.py
CHANGED

@@ -26,6 +26,8 @@ from nominal.core.filetype import FileType, FileTypes
 from nominal.core.log import LogPoint, _write_logs
 from nominal.ts import (
     _AnyTimestampType,
+    _InferrableTimestampType,
+    _SecondsNanos,
     _to_typed_timestamp_type,
 )
 
@@ -93,6 +95,32 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
         updated_dataset = self._clients.catalog.update_dataset_metadata(self._clients.auth_header, self.rid, request)
         return self._refresh_from_api(updated_dataset)
 
+    def update_bounds(
+        self,
+        *,
+        start: _InferrableTimestampType,
+        end: _InferrableTimestampType,
+    ) -> Self:
+        """Update the bounds (start and end timestamps) of the dataset.
+
+        Updates the current instance, and returns it.
+
+        Args:
+            start: The start timestamp of the dataset bounds. Can be a datetime, ISO 8601 string,
+                or integer nanoseconds since epoch.
+            end: The end timestamp of the dataset bounds. Can be a datetime, ISO 8601 string,
+                or integer nanoseconds since epoch.
+
+        Returns:
+            The updated Dataset instance with new bounds.
+        """
+        bounds = Bounds(
+            start=_SecondsNanos.from_flexible(start).to_nanoseconds(),
+            end=_SecondsNanos.from_flexible(end).to_nanoseconds(),
+        )
+        request = scout_catalog.UpdateBoundsRequest(bounds=bounds._to_conjure())
+        self._clients.catalog.update_global_dataset_bounds(self._clients.auth_header, request, self.rid)
+        return self.refresh()
+
     def _handle_ingest_response(self, response: ingest_api.IngestResponse) -> DatasetFile:
         if response.details.dataset is None:
             raise ValueError(f"Expected response to provide dataset details, received: {response.details.type}")
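Per its docstring, `update_bounds` accepts the SDK's flexible timestamp forms and refreshes the instance after the catalog call. A usage sketch; `client` is an already-constructed `NominalClient`, and the lookup call and RID below are placeholders:

from datetime import datetime, timezone

dataset = client.get_dataset("ri.catalog.main.dataset.xxxx")  # placeholder RID

# datetimes, ISO 8601 strings, and integer nanoseconds are all accepted.
dataset = dataset.update_bounds(
    start=datetime(2026, 1, 1, tzinfo=timezone.utc),
    end="2026-01-02T00:00:00Z",
)
print(dataset.bounds)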
@@ -216,6 +244,8 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
         API, making it useful for use cases where network connection drops during streaming and a backup file needs
         to be created.
 
+        For struct columns, values should be converted to JSON strings and wrapped in the JsonStruct record type.
+
         If this schema is not used, will result in a failed ingestion.
         {
             "type": "record",
@@ -234,8 +264,15 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
             },
             {
                 "name": "values",
-                "type": {"type": "array", "items": [
-
+                "type": {"type": "array", "items": [
+                    "double",
+                    "string",
+                    "long",
+                    {"type": "record", "name": "DoubleArray", "fields": [{"name": "items", "type": {"type": "array", "items": "double"}}]},
+                    {"type": "record", "name": "StringArray", "fields": [{"name": "items", "type": {"type": "array", "items": "string"}}]},
+                    {"type": "record", "name": "JsonStruct", "fields": [{"name": "json", "type": "string"}]}
+                ]},
+                "doc": "Array of values. Can be doubles, longs, strings, arrays, or JSON structs",
             },
             {
                 "name": "tags",
@@ -246,6 +283,8 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
         ],
         }
 
+        Note: The previous schema with only "double" and "string" value types is still fully supported.
+
         Args:
             path: Path to the .avro file to upload
 
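The widened `values` union is the substance of this docstring change: each row's values array may now mix scalars with the three named record types, and struct columns are pre-serialized to JSON per the note above. A sketch of one row's `values` under that schema (the field contents are illustrative; the docstring's schema is authoritative):

import json

# A struct value is serialized to JSON and wrapped in the JsonStruct record
# from the schema above; the surrounding row layout (channels, timestamps,
# tags) is elided here because the diff only shows part of the schema.
struct_value = {"mode": "ascent", "stage": 2}
json_struct = {"json": json.dumps(struct_value)}

# Scalar and array values map onto the other union branches:
values_row = [
    3.14,                        # "double"
    42,                          # "long"
    "nominal",                   # "string"
    {"items": [1.0, 2.0, 3.0]},  # DoubleArray record
    {"items": ["a", "b"]},       # StringArray record
    json_struct,                 # JsonStruct record
]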
nominal/core/datasource.py
CHANGED

@@ -18,7 +18,6 @@ from nominal_api import (
     storage_writer_api,
     timeseries_channelmetadata,
     timeseries_channelmetadata_api,
-    timeseries_logicalseries,
     timeseries_metadata,
     timeseries_metadata_api,
     upload_api,
@@ -53,8 +52,6 @@ class DataSource(HasRid):
        @property
        def datasource(self) -> scout_datasource.DataSourceService: ...
        @property
-       def logical_series(self) -> timeseries_logicalseries.LogicalSeriesService: ...
-       @property
        def units(self) -> scout.UnitsService: ...
        @property
        def ingest(self) -> ingest_api.IngestService: ...
nominal/experimental/migration/migration_utils.py
CHANGED

@@ -51,6 +51,56 @@ logger = logging.getLogger(__name__)
 
 ConjureType = Union[ConjureBeanType, ConjureUnionType, ConjureEnumType]
 
+
+def _install_migration_file_logger(
+    log_path: str | Path | None = None,
+    *,
+    logger: logging.Logger | None = None,
+    level: int = logging.INFO,
+    formatter: logging.Formatter | None = None,
+    mode: str = "a",
+) -> logging.FileHandler:
+    """Install a file handler that only writes log records with extra={"to_file": True}.
+
+    Args:
+        log_path: File path to write filtered logs to. If None (or a directory), a timestamped
+            file named "migration_utils_output_YYYY-MM-DD-HH-MM-SS.txt" is created.
+        logger: Logger to attach the handler to. Defaults to the root logger.
+        level: Minimum log level to write to the file.
+        formatter: Optional formatter to apply to the file handler.
+        mode: File open mode for the handler.
+
+    Returns:
+        The attached FileHandler instance.
+    """
+    if logger is None:
+        logger = logging.getLogger()
+
+    if log_path is None:
+        log_path_obj = Path.cwd()
+    else:
+        log_path_obj = Path(log_path)
+
+    if log_path_obj.is_dir():
+        timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+        log_path_obj = log_path_obj / f"migration_utils_output_{timestamp}.txt"
+
+    handler = logging.FileHandler(log_path_obj, mode=mode, encoding="utf-8")
+    handler.setLevel(level)
+    if formatter is not None:
+        handler.setFormatter(formatter)
+
+    filter_obj = logging.Filter()
+
+    def _filter(record: logging.LogRecord) -> bool:
+        return bool(getattr(record, "to_file", False))
+
+    filter_obj.filter = _filter  # type: ignore[method-assign]
+    handler.addFilter(filter_obj)
+    logger.addHandler(handler)
+    return handler
+
+
 # Regex pattern to match strings that have a UUID format with a prefix.
 UUID_PATTERN = re.compile(r"^(.*)([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$")
 
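The filter is the whole trick: the handler sees every record on the logger but persists only those flagged with extra={"to_file": True}, so the RID-mapping lines land in the file without dragging the rest of the migration chatter along. A self-contained sketch of the same pattern outside the SDK (the filename is an assumption):

import logging

logger = logging.getLogger("demo")
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())  # normal progress output

file_handler = logging.FileHandler("rid_mapping.txt", mode="a", encoding="utf-8")
# Only records carrying extra={"to_file": True} pass this filter;
# plain callables are valid filters on Python 3.2+.
file_handler.addFilter(lambda record: bool(getattr(record, "to_file", False)))
logger.addHandler(file_handler)

logger.info("progress message")  # console only
logger.info(
    "DATASET: Old RID: %s, New RID: %s", "ri.old", "ri.new",
    extra={"to_file": True},
)  # console and rid_mapping.txt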
@@ -368,6 +418,12 @@ def copy_workbook_template_from(
         source_template.rid,
         extra=log_extras,
     )
+    logger.info(
+        "WORKBOOK_TEMPLATE: Old RID: %s, New RID: %s",
+        source_template.rid,
+        new_workbook_template.rid,
+        extra={"to_file": True},
+    )
     return new_workbook_template
 
 
@@ -432,6 +488,12 @@ def copy_video_file_to_video_dataset(
         destination_video_dataset.name,
         destination_video_dataset.rid,
     )
+    logger.info(
+        "VIDEO_FILE: Old RID: %s, New RID: %s",
+        source_video_file.rid,
+        new_file.rid,
+        extra={"to_file": True},
+    )
     return new_file
 
 
@@ -502,6 +564,12 @@ def copy_video_from(
         new_video.rid,
         extra=log_extras,
     )
+    logger.info(
+        "VIDEO: Old RID: %s, New RID: %s",
+        source_video.rid,
+        new_video.rid,
+        extra={"to_file": True},
+    )
     return new_video
 
 
@@ -552,6 +620,12 @@ def copy_file_to_dataset(
             destination_dataset.name,
             destination_dataset.rid,
         )
+        logger.info(
+            "DATASET_FILE: Old RID: %s, New RID: %s",
+            source_file.id,
+            new_file.id,
+            extra={"to_file": True},
+        )
        return new_file
    else:  # Because these fields are optional, need to check for None. We shouldn't ever run into this.
        raise ValueError("Unsupported file handle type or missing timestamp information.")
@@ -658,12 +732,26 @@ def copy_dataset_from(
     if include_files:
         for source_file in source_dataset.list_files():
             copy_file_to_dataset(source_file, new_dataset)
+
+    # Copy bounds from source dataset if they exist
+    if source_dataset.bounds is not None:
+        new_dataset = new_dataset.update_bounds(
+            start=source_dataset.bounds.start,
+            end=source_dataset.bounds.end,
+        )
+
     logger.debug(
         "New dataset created: %s (rid: %s)",
         new_dataset.name,
         new_dataset.rid,
         extra=log_extras,
     )
+    logger.info(
+        "DATASET: Old RID: %s, New RID: %s",
+        source_dataset.rid,
+        new_dataset.rid,
+        extra={"to_file": True},
+    )
     return new_dataset
 
 
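Each copy helper now emits one machine-greppable `KIND: Old RID: ..., New RID: ...` line to the migration log. A hedged sketch of reading that file back into an old-to-new lookup table (assumes the handler was installed without a formatter, so each line is the bare message):

import re
from pathlib import Path

RID_LINE = re.compile(r"^(\w+): Old RID: (\S+), New RID: (\S+)$")

def load_rid_mapping(log_file: Path) -> dict[str, str]:
    """Map old RIDs to new RIDs from a migration_utils_output_*.txt file."""
    mapping: dict[str, str] = {}
    for line in log_file.read_text(encoding="utf-8").splitlines():
        match = RID_LINE.match(line.strip())
        if match:
            _kind, old_rid, new_rid = match.groups()
            mapping[old_rid] = new_rid
    return mapping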
@@ -735,6 +823,12 @@ def copy_event_from(
         new_event.rid,
         extra=log_extras,
     )
+    logger.info(
+        "EVENT: Old RID: %s, New RID: %s",
+        source_event.rid,
+        new_event.rid,
+        extra={"to_file": True},
+    )
     return new_event
 
 
@@ -792,6 +886,7 @@ def copy_run_from(
         source_run.rid,
         extra=log_extras,
     )
+
     new_run = destination_client.create_run(
         name=new_name or source_run.name,
         start=new_start or source_run.start,
@@ -804,6 +899,12 @@ def copy_run_from(
         attachments=new_attachments or source_run.list_attachments(),
     )
     logger.debug("New run created: %s (rid: %s)", new_run.name, new_run.rid, extra=log_extras)
+    logger.info(
+        "RUN: Old RID: %s, New RID: %s",
+        source_run.rid,
+        new_run.rid,
+        extra={"to_file": True},
+    )
     return new_run
 
 
@@ -863,6 +964,7 @@ def copy_asset_from(
     log_extras = {
         "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
     }
+
     logger.debug(
         "Copying asset %s (rid: %s)",
         source_asset.name,
@@ -875,6 +977,7 @@ def copy_asset_from(
         properties=new_asset_properties if new_asset_properties is not None else source_asset.properties,
         labels=new_asset_labels if new_asset_labels is not None else source_asset.labels,
     )
+
     if dataset_config is not None:
         source_datasets = source_asset.list_datasets()
         for data_scope, source_dataset in source_datasets:
@@ -909,6 +1012,12 @@ def copy_asset_from(
         copy_video_file_to_video_dataset(source_video_file, new_video_dataset)
 
     logger.debug("New asset created: %s (rid: %s)", new_asset, new_asset.rid, extra=log_extras)
+    logger.info(
+        "ASSET: Old RID: %s, New RID: %s",
+        source_asset.rid,
+        new_asset.rid,
+        extra={"to_file": True},
+    )
     return new_asset
 
 
@@ -928,41 +1037,47 @@ def copy_resources_to_destination_client(
     Returns:
         All of the created resources.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    for source_workbook_template in source_asset.source_workbook_templates:
-        new_template = clone_workbook_template(source_workbook_template, destination_client)
-        new_templates.append(new_template)
-        new_workbook = new_template.create_workbook(
-            title=new_template.title, description=new_template.description, asset=new_asset
-        )
-        logger.debug(
-            "Created new workbook %s (rid: %s) from template %s (rid: %s)",
-            new_workbook.title,
-            new_workbook.rid,
-            new_template.title,
-            new_template.rid,
-            extra=log_extras,
+    file_handler = _install_migration_file_logger()
+    try:
+        log_extras = {
+            "destination_client_workspace": destination_client.get_workspace(
+                destination_client._clients.workspace_rid
+            ).rid,
+        }
+
+        new_assets = []
+        new_templates = []
+        new_workbooks = []
+
+        new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
+        for source_asset in migration_resources.source_assets:
+            new_asset = copy_asset_from(
+                source_asset.asset,
+                destination_client,
+                dataset_config=dataset_config,
+                include_events=True,
+                include_runs=True,
+                include_video=True,
            )
-
-
+            new_assets.append(new_asset)
+            new_data_scopes_and_datasets.extend(new_asset.list_datasets())
+
+            for source_workbook_template in source_asset.source_workbook_templates:
+                new_template = clone_workbook_template(source_workbook_template, destination_client)
+                new_templates.append(new_template)
+                new_workbook = new_template.create_workbook(
+                    title=new_template.title, description=new_template.description, asset=new_asset
+                )
+                logger.debug(
+                    "Created new workbook %s (rid: %s) from template %s (rid: %s)",
+                    new_workbook.title,
+                    new_workbook.rid,
+                    new_template.title,
+                    new_template.rid,
+                    extra=log_extras,
+                )
+                new_workbooks.append(new_workbook)
+    finally:
+        file_handler.close()
+        logger.removeHandler(file_handler)
     return (new_data_scopes_and_datasets, new_assets, new_templates, new_workbooks)
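With this restructure, copy_resources_to_destination_client brackets the whole copy in try/finally around the file logger, so a single call yields both the new resources and an on-disk RID map even if a copy step throws. A usage sketch; the argument names and order are inferred from the hunk above, and both clients, `migration_resources`, and `dataset_config` are placeholders constructed elsewhere with the migration module's own helpers:

from nominal.experimental.migration import migration_utils

# migration_resources / dataset_config / destination_client are assumed to be
# built beforehand with a configured NominalClient.
datasets, assets, templates, workbooks = migration_utils.copy_resources_to_destination_client(
    migration_resources,
    destination_client,
    dataset_config=dataset_config,
)
# The RID mapping file (migration_utils_output_<timestamp>.txt) is written to
# the current working directory by the try/finally block above.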
{nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-CHANGELOG.md,sha256=
+CHANGELOG.md,sha256=jHCernM5EkOd7IWlG7EiBylcKPNjD0wHgEJdNIstFoc,92050
 LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
 README.md,sha256=KKe0dxh_pHXCtB7I9G4qWGQYvot_BZU8yW6MJyuyUHM,311
 nominal/__init__.py,sha256=rbraORnXUrNn1hywLXM0XwSQCd9UmQt20PDYlsBalfE,2167
@@ -29,23 +29,23 @@ nominal/config/__init__.py,sha256=wV8cq8X3J4NTJ5H_uR5THaMT_NQpWQO5qCUGEb-rPnM,31
 nominal/config/_config.py,sha256=yKq_H1iYJDoxRfLz2iXLbbVdoL0MTEY0FS4eVL12w0g,2004
 nominal/core/__init__.py,sha256=1MiCC44cxHYFofP4hf2fz4EIkepK-OAhDzpPFIzHbWw,2422
 nominal/core/_checklist_types.py,sha256=YcjvuUbQDIOYqqM5H1Eto9ws9Ivm4cPWEaeEF2Uwn1o,1361
-nominal/core/_clientsbunch.py,sha256=
+nominal/core/_clientsbunch.py,sha256=k8XeVH5c_gv037RlZI44vPI8XaWvDeF5DRtFRKHDylE,8447
 nominal/core/_constants.py,sha256=SrxgaSqAEB1MvTSrorgGam3eO29iCmRr6VIdajxX3gI,56
 nominal/core/_event_types.py,sha256=Cq_8x-zv_5EDvRo9UTbaOpenAy92bTfQxlsEuHPOhtE,3706
 nominal/core/_types.py,sha256=FktMmcQ5_rD2rbXv8_p-WISzSo8T2NtO-exsLm-iadU,122
 nominal/core/_video_types.py,sha256=Cdl0sZxX3cyYtCXzsnnLWjK38hHp3_orMe6oiUU_dyc,465
 nominal/core/asset.py,sha256=S41KS_c14tFcxFLJzU3bnt958KpMSI_U524QcMCiSEE,23609
 nominal/core/attachment.py,sha256=yOtDUdkLY5MT_Rk9kUlr1yupIJN7a5pt5sJWx4RLQV8,4355
-nominal/core/bounds.py,sha256=
-nominal/core/channel.py,sha256=
+nominal/core/bounds.py,sha256=mYUWBtpRMltqPVmHD3Bu282rtwhw1oqS3VJxpcCC0H4,882
+nominal/core/channel.py,sha256=oSZUAXRH9k9YPXZJ3LdgKEeFBmLjlVdgK3zdfQqRe5o,18991
 nominal/core/checklist.py,sha256=TXEm9qNYCG6lU5NB5P3RAe-XmXdj1Tcsdbx_c5_spXI,6663
 nominal/core/client.py,sha256=RjMQCU8DmvHcp7lypVCFLY54caoTXu76EwN-oxaFjsw,68091
 nominal/core/connection.py,sha256=LYllr3a1H2xp8-i4MaX1M7yK8X-HnwuIkciyK9XgLtQ,5175
 nominal/core/containerized_extractors.py,sha256=fUz3-NHoNWYKqOCD15gLwGXDKVfdsW-x_kpXnkOI3BE,10224
 nominal/core/data_review.py,sha256=8pyoJiP-6KCSSB4NE_LKjp1JfenEigHTmEVdF1xF1bA,11674
-nominal/core/dataset.py,sha256=
+nominal/core/dataset.py,sha256=gnI_nZT1U1kAT_qGp40LgdKjLfRzFIhqst0Mk5vvWzE,48646
 nominal/core/dataset_file.py,sha256=1cvEsf3IXGCOIr5kWIBBSwfHpZMAY-BEUEtewR6RjNc,16789
-nominal/core/datasource.py,sha256=
+nominal/core/datasource.py,sha256=lW9CqeA_4gNqeSjx0u-hyp0W1yeyWuMLqUXFhF5e0QM,18604
 nominal/core/event.py,sha256=8trZXyuAqRlKedgcqSgDIimXAAJBmEfDLyHkOOBwUC0,7762
 nominal/core/exceptions.py,sha256=GUpwXRgdYamLl6684FE8ttCRHkBx6WEhOZ3NPE-ybD4,2671
 nominal/core/filetype.py,sha256=R8goHGW4SP0iO6AoQiUil2tNVuDgaQoHclftRbw44oc,5558
@@ -94,7 +94,7 @@ nominal/experimental/logging/rich_log_handler.py,sha256=8yz_VtxNgJg2oiesnXz2iXoB
 nominal/experimental/migration/__init__.py,sha256=E2IgWJLwJ5bN6jbl8k5nHECKFx5aT11jKAzVYcyXn3o,460
 nominal/experimental/migration/migration_data_config.py,sha256=sPwZjyLmL-_pHvDZvQspxrfW6yNZhEsQjDVwKA8IaXM,522
 nominal/experimental/migration/migration_resources.py,sha256=Tf_7kNBeSaY8z2fTF7DAxk-9q3a7F8xXFVvxI8tTc9c,415
-nominal/experimental/migration/migration_utils.py,sha256=
+nominal/experimental/migration/migration_utils.py,sha256=DxPKtPT1ZHTUdkMh6yt3YLK3A7C7eAaaVVI0JaEjKZo,42423
 nominal/experimental/rust_streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/experimental/rust_streaming/rust_write_stream.py,sha256=oQ6ixwm8ct8ZDc_qNB7AucDt8o5-_aBVlW2fFCQ_nmA,1541
 nominal/experimental/stream_v2/__init__.py,sha256=W39vK46pssx5sXvmsImMuJiEPs7iGtwrbYBI0bWnXCY,2313
@@ -113,8 +113,8 @@ nominal/thirdparty/polars/polars_export_handler.py,sha256=hGCSwXX9dC4MG01CmmjlTb
 nominal/thirdparty/tdms/__init__.py,sha256=6n2ImFr2Wiil6JM1P5Q7Mpr0VzLcnDkmup_ftNpPq-s,142
 nominal/thirdparty/tdms/_tdms.py,sha256=m4gxbpxB9MTLi2FuYvGlbUGSyDAZKFxbM3ia2x1wIz0,8746
 nominal/ts/__init__.py,sha256=hmd0ENvDhxRnzDKGLxIub6QG8LpcxCgcyAct029CaEs,21442
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
+nominal-1.111.1.dist-info/METADATA,sha256=F-Dar1ZHrNnsCswYsOuIaA8zKfsKsBVtm47o7RflGMo,2307
+nominal-1.111.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+nominal-1.111.1.dist-info/entry_points.txt,sha256=-mCLhxgg9R_lm5efT7vW9wuBH12izvY322R0a3TYxbE,66
+nominal-1.111.1.dist-info/licenses/LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
+nominal-1.111.1.dist-info/RECORD,,
{nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/WHEEL
File without changes

{nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/entry_points.txt
File without changes

{nominal-1.110.0.dist-info → nominal-1.111.1.dist-info}/licenses/LICENSE
File without changes