nominal-1.110.0.tar.gz → nominal-1.111.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. {nominal-1.110.0 → nominal-1.111.1}/.gitignore +2 -1
  2. {nominal-1.110.0 → nominal-1.111.1}/CHANGELOG.md +25 -0
  3. {nominal-1.110.0 → nominal-1.111.1}/PKG-INFO +1 -1
  4. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_clientsbunch.py +0 -3
  5. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/bounds.py +8 -1
  6. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/channel.py +0 -15
  7. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/dataset.py +41 -2
  8. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/datasource.py +0 -3
  9. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/migration/migration_utils.py +151 -36
  10. {nominal-1.110.0 → nominal-1.111.1}/pyproject.toml +4 -1
  11. {nominal-1.110.0 → nominal-1.111.1}/LICENSE +0 -0
  12. {nominal-1.110.0 → nominal-1.111.1}/README.md +0 -0
  13. {nominal-1.110.0 → nominal-1.111.1}/nominal/__init__.py +0 -0
  14. {nominal-1.110.0 → nominal-1.111.1}/nominal/__main__.py +0 -0
  15. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/README.md +0 -0
  16. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/__init__.py +0 -0
  17. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/dataclass_tools.py +0 -0
  18. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/deprecation_tools.py +0 -0
  19. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/iterator_tools.py +0 -0
  20. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/streaming_tools.py +0 -0
  21. {nominal-1.110.0 → nominal-1.111.1}/nominal/_utils/timing_tools.py +0 -0
  22. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/__init__.py +0 -0
  23. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/__main__.py +0 -0
  24. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/attachment.py +0 -0
  25. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/auth.py +0 -0
  26. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/config.py +0 -0
  27. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/dataset.py +0 -0
  28. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/download.py +0 -0
  29. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/mis.py +0 -0
  30. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/run.py +0 -0
  31. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/util/__init__.py +0 -0
  32. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/util/click_log_handler.py +0 -0
  33. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/util/global_decorators.py +0 -0
  34. {nominal-1.110.0 → nominal-1.111.1}/nominal/cli/util/verify_connection.py +0 -0
  35. {nominal-1.110.0 → nominal-1.111.1}/nominal/config/__init__.py +0 -0
  36. {nominal-1.110.0 → nominal-1.111.1}/nominal/config/_config.py +0 -0
  37. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/__init__.py +0 -0
  38. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_checklist_types.py +0 -0
  39. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_constants.py +0 -0
  40. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_event_types.py +0 -0
  41. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_stream/__init__.py +0 -0
  42. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_stream/batch_processor.py +0 -0
  43. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_stream/batch_processor_proto.py +0 -0
  44. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_stream/write_stream.py +0 -0
  45. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_stream/write_stream_base.py +0 -0
  46. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_types.py +0 -0
  47. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/README.md +0 -0
  48. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/__init__.py +0 -0
  49. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/api_tools.py +0 -0
  50. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/multipart.py +0 -0
  51. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/multipart_downloader.py +0 -0
  52. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/networking.py +0 -0
  53. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/pagination_tools.py +0 -0
  54. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/query_tools.py +0 -0
  55. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_utils/queueing.py +0 -0
  56. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/_video_types.py +0 -0
  57. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/asset.py +0 -0
  58. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/attachment.py +0 -0
  59. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/checklist.py +0 -0
  60. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/client.py +0 -0
  61. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/connection.py +0 -0
  62. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/containerized_extractors.py +0 -0
  63. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/data_review.py +0 -0
  64. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/dataset_file.py +0 -0
  65. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/event.py +0 -0
  66. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/exceptions.py +0 -0
  67. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/filetype.py +0 -0
  68. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/log.py +0 -0
  69. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/run.py +0 -0
  70. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/secret.py +0 -0
  71. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/streaming_checklist.py +0 -0
  72. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/unit.py +0 -0
  73. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/user.py +0 -0
  74. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/video.py +0 -0
  75. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/video_file.py +0 -0
  76. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/workbook.py +0 -0
  77. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/workbook_template.py +0 -0
  78. {nominal-1.110.0 → nominal-1.111.1}/nominal/core/workspace.py +0 -0
  79. {nominal-1.110.0 → nominal-1.111.1}/nominal/exceptions/__init__.py +0 -0
  80. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/__init__.py +0 -0
  81. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/README.md +0 -0
  82. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/__init__.py +0 -0
  83. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/_buckets.py +0 -0
  84. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/__init__.py +0 -0
  85. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/_enum_expr_impls.py +0 -0
  86. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/_numeric_expr_impls.py +0 -0
  87. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/_range_expr_impls.py +0 -0
  88. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/exprs.py +0 -0
  89. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/compute/dsl/params.py +0 -0
  90. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/dataset_utils/__init__.py +0 -0
  91. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/dataset_utils/_dataset_utils.py +0 -0
  92. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/logging/__init__.py +0 -0
  93. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/logging/click_log_handler.py +0 -0
  94. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/logging/nominal_log_handler.py +0 -0
  95. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/logging/rich_log_handler.py +0 -0
  96. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/migration/__init__.py +0 -0
  97. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/migration/migration_data_config.py +0 -0
  98. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/migration/migration_resources.py +0 -0
  99. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/rust_streaming/__init__.py +0 -0
  100. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/rust_streaming/rust_write_stream.py +0 -0
  101. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/stream_v2/__init__.py +0 -0
  102. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/stream_v2/_serializer.py +0 -0
  103. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/stream_v2/_write_stream.py +0 -0
  104. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/video_processing/__init__.py +0 -0
  105. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/video_processing/resolution.py +0 -0
  106. {nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/video_processing/video_conversion.py +0 -0
  107. {nominal-1.110.0 → nominal-1.111.1}/nominal/nominal.py +0 -0
  108. {nominal-1.110.0 → nominal-1.111.1}/nominal/py.typed +0 -0
  109. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/__init__.py +0 -0
  110. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/matlab/__init__.py +0 -0
  111. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/matlab/_matlab.py +0 -0
  112. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/pandas/__init__.py +0 -0
  113. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/pandas/_pandas.py +0 -0
  114. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/polars/__init__.py +0 -0
  115. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/polars/polars_export_handler.py +0 -0
  116. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/tdms/__init__.py +0 -0
  117. {nominal-1.110.0 → nominal-1.111.1}/nominal/thirdparty/tdms/_tdms.py +0 -0
  118. {nominal-1.110.0 → nominal-1.111.1}/nominal/ts/__init__.py +0 -0
{nominal-1.110.0 → nominal-1.111.1}/.gitignore

@@ -176,4 +176,5 @@ uv.toml
 
 # Don't check in data
 *.csv
-*.parquet
+*.parquet
+*migration_utils_output_*.txt
{nominal-1.110.0 → nominal-1.111.1}/CHANGELOG.md

@@ -1,5 +1,30 @@
 # Changelog
 
+## [1.111.1](https://github.com/nominal-io/nominal-client/compare/v1.111.0...v1.111.1) (2026-01-29)
+
+
+### Bug Fixes
+
+* update bounds on dataset ([#598](https://github.com/nominal-io/nominal-client/issues/598)) ([9d57d3a](https://github.com/nominal-io/nominal-client/commit/9d57d3ae515359dce3ae4c102ffe230e3bf91faf))
+
+## [1.111.0](https://github.com/nominal-io/nominal-client/compare/v1.110.0...v1.111.0) (2026-01-29)
+
+
+### Features
+
+* add RID mapping logging to migration utils ([#593](https://github.com/nominal-io/nominal-client/issues/593)) ([bf19a8b](https://github.com/nominal-io/nominal-client/commit/bf19a8b99f176d0d25586c9f134278a684cedcd2))
+* copy over bounds in clone dataset method ([#597](https://github.com/nominal-io/nominal-client/issues/597)) ([49f0cc1](https://github.com/nominal-io/nominal-client/commit/49f0cc119895768f48fd94a70719ad516bf51a94))
+
+
+### Bug Fixes
+
+* remove logical series service references ([#591](https://github.com/nominal-io/nominal-client/issues/591)) ([ec3ce3b](https://github.com/nominal-io/nominal-client/commit/ec3ce3bc6ddfad7a3e07cfc51f0681bbb719f952))
+
+
+### Documentation
+
+* update avro stream docstring with new schema types ([#594](https://github.com/nominal-io/nominal-client/issues/594)) ([690e5a3](https://github.com/nominal-io/nominal-client/commit/690e5a34dc782601df13d72034c452844fc644a4))
+
 ## [1.110.0](https://github.com/nominal-io/nominal-client/compare/v1.109.0...v1.110.0) (2026-01-23)
 
 
{nominal-1.110.0 → nominal-1.111.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nominal
-Version: 1.110.0
+Version: 1.111.1
 Summary: Automate Nominal workflows in Python
 Project-URL: Homepage, https://nominal.io
 Project-URL: Documentation, https://docs.nominal.io
{nominal-1.110.0 → nominal-1.111.1}/nominal/core/_clientsbunch.py

@@ -27,7 +27,6 @@ from nominal_api import (
     storage_datasource_api,
     storage_writer_api,
     timeseries_channelmetadata,
-    timeseries_logicalseries,
     timeseries_metadata,
     upload_api,
 )
@@ -124,7 +123,6 @@ class ClientsBunch:
    dataexport: scout_dataexport_api.DataExportService
    datasource: scout_datasource.DataSourceService
    ingest: ingest_api.IngestService
-   logical_series: timeseries_logicalseries.LogicalSeriesService
    run: scout.RunService
    units: scout.UnitsService
    upload: upload_api.UploadService
@@ -165,7 +163,6 @@ class ClientsBunch:
             dataexport=client_factory(scout_dataexport_api.DataExportService),
             datasource=client_factory(scout_datasource.DataSourceService),
             ingest=client_factory(ingest_api.IngestService),
-            logical_series=client_factory(timeseries_logicalseries.LogicalSeriesService),
             run=client_factory(scout.RunService),
             units=client_factory(scout.UnitsService),
             upload=client_factory(upload_api.UploadService),
{nominal-1.110.0 → nominal-1.111.1}/nominal/core/bounds.py

@@ -2,7 +2,7 @@ from __future__ import annotations
 
 from dataclasses import dataclass
 
-from nominal_api import scout_catalog
+from nominal_api import datasource, scout_catalog
 from typing_extensions import Self
 
 from nominal.ts import (
@@ -22,3 +22,10 @@ class Bounds:
             start=_SecondsNanos.from_api(bounds.start).to_nanoseconds(),
             end=_SecondsNanos.from_api(bounds.end).to_nanoseconds(),
         )
+
+    def _to_conjure(self) -> scout_catalog.Bounds:
+        return scout_catalog.Bounds(
+            type=datasource.TimestampType.ABSOLUTE,
+            start=_SecondsNanos.from_nanoseconds(self.start).to_api(),
+            end=_SecondsNanos.from_nanoseconds(self.end).to_api(),
+        )
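Together with the existing from_api classmethod, the new _to_conjure gives Bounds a round trip between integer nanoseconds and the conjure wire type. A minimal sketch of the seconds/nanos split that _SecondsNanos.from_nanoseconds presumably performs; the real helper lives in nominal.ts, and this standalone version is illustrative only:

import datetime  # only for the comment below; not required by the helper

# Illustrative only: split integer nanoseconds since epoch into the
# (seconds, nanos) pair carried by conjure timestamp types.
def split_nanos(ts_ns: int) -> tuple[int, int]:
    seconds, nanos = divmod(ts_ns, 1_000_000_000)
    return seconds, nanos

assert split_nanos(1_700_000_000_123_456_789) == (1_700_000_000, 123_456_789)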
{nominal-1.110.0 → nominal-1.111.1}/nominal/core/channel.py

@@ -15,7 +15,6 @@ from nominal_api import (
     storage_series_api,
     timeseries_channelmetadata,
     timeseries_channelmetadata_api,
-    timeseries_logicalseries_api,
 )
 from typing_extensions import Self
 
@@ -298,20 +297,6 @@ class Channel(RefreshableMixin[timeseries_channelmetadata_api.ChannelMetadata]):
             _clients=clients,
         )
 
-    @classmethod
-    def _from_conjure_logicalseries_api(
-        cls, clients: _Clients, series: timeseries_logicalseries_api.LogicalSeries
-    ) -> Self:
-        channel_data_type = ChannelDataType._from_conjure(series.series_data_type) if series.series_data_type else None
-        return cls(
-            name=series.channel,
-            data_source=series.data_source_rid,
-            unit=series.unit,
-            description=series.description,
-            data_type=channel_data_type,
-            _clients=clients,
-        )
-
     @classmethod
     def _from_channel_metadata_api(
         cls, clients: _Clients, channel: timeseries_channelmetadata_api.ChannelMetadata
{nominal-1.110.0 → nominal-1.111.1}/nominal/core/dataset.py

@@ -26,6 +26,8 @@ from nominal.core.filetype import FileType, FileTypes
 from nominal.core.log import LogPoint, _write_logs
 from nominal.ts import (
     _AnyTimestampType,
+    _InferrableTimestampType,
+    _SecondsNanos,
     _to_typed_timestamp_type,
 )
 
@@ -93,6 +95,32 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
         updated_dataset = self._clients.catalog.update_dataset_metadata(self._clients.auth_header, self.rid, request)
         return self._refresh_from_api(updated_dataset)
 
+    def update_bounds(
+        self,
+        *,
+        start: _InferrableTimestampType,
+        end: _InferrableTimestampType,
+    ) -> Self:
+        """Update the bounds (start and end timestamps) of the dataset.
+        Updates the current instance, and returns it.
+
+        Args:
+            start: The start timestamp of the dataset bounds. Can be a datetime, ISO 8601 string,
+                or integer nanoseconds since epoch.
+            end: The end timestamp of the dataset bounds. Can be a datetime, ISO 8601 string,
+                or integer nanoseconds since epoch.
+
+        Returns:
+            The updated Dataset instance with new bounds.
+        """
+        bounds = Bounds(
+            start=_SecondsNanos.from_flexible(start).to_nanoseconds(),
+            end=_SecondsNanos.from_flexible(end).to_nanoseconds(),
+        )
+        request = scout_catalog.UpdateBoundsRequest(bounds=bounds._to_conjure())
+        self._clients.catalog.update_global_dataset_bounds(self._clients.auth_header, request, self.rid)
+        return self.refresh()
+
     def _handle_ingest_response(self, response: ingest_api.IngestResponse) -> DatasetFile:
         if response.details.dataset is None:
             raise ValueError(f"Expected response to provide dataset details, received: {response.details.type}")
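A usage sketch for the new method; the client construction and dataset RID below are placeholders, not taken from this diff. Per the docstring, start and end each accept a datetime, an ISO 8601 string, or integer nanoseconds since epoch:

from datetime import datetime, timezone

from nominal.core.client import NominalClient

# Placeholder connection details: substitute your own base URL, token, and RID.
client = NominalClient.create("https://api.gov.nominal.io/api", token="...")
dataset = client.get_dataset("ri.catalog.<instance>.dataset.<uuid>")  # placeholder RID

dataset = dataset.update_bounds(
    start=datetime(2026, 1, 1, tzinfo=timezone.utc),  # datetime
    end="2026-01-02T00:00:00Z",                       # ISO 8601 string
)
print(dataset.bounds)  # the refreshed instance carries the new bounds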
@@ -216,6 +244,8 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
         API, making it useful for use cases where network connection drops during streaming and a backup file needs
         to be created.
 
+        For struct columns, values should be converted to JSON strings and wrapped in the JsonStruct record type.
+
         If this schema is not used, will result in a failed ingestion.
         {
             "type": "record",
@@ -234,8 +264,15 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
                 },
                 {
                     "name": "values",
-                    "type": {"type": "array", "items": ["double", "string"]},
-                    "doc": "Array of values. Can either be doubles or strings",
+                    "type": {"type": "array", "items": [
+                        "double",
+                        "string",
+                        "long",
+                        {"type": "record", "name": "DoubleArray", "fields": [{"name": "items", "type": {"type": "array", "items": "double"}}]},
+                        {"type": "record", "name": "StringArray", "fields": [{"name": "items", "type": {"type": "array", "items": "string"}}]},
+                        {"type": "record", "name": "JsonStruct", "fields": [{"name": "json", "type": "string"}]}
+                    ]},
+                    "doc": "Array of values. Can be doubles, longs, strings, arrays, or JSON structs",
                 },
                 {
                     "name": "tags",
@@ -246,6 +283,8 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
             ],
         }
 
+        Note: The previous schema with only "double" and "string" value types is still fully supported.
+
         Args:
             path: Path to the .avro file to upload
 
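For writers targeting the expanded union, a sketch of how one row's "values" array can mix the new branches. It assumes fastavro-style union handling, which this diff does not prescribe: record branches are plain dicts, and an explicit ("BranchName", value) tuple disambiguates cases such as an int, which could be either double or long.

import json

# Sketch only: one row's "values" array mixing the union branches shown in
# the docstring above. Plain floats and strings map unambiguously to the
# double and string branches; tuples pick a branch explicitly.
values = [
    3.14,                                                    # double
    "NOMINAL",                                               # string
    ("long", 42),                                            # long, not double
    ("DoubleArray", {"items": [1.0, 2.0, 3.0]}),             # nested double array
    ("StringArray", {"items": ["a", "b"]}),                  # nested string array
    ("JsonStruct", {"json": json.dumps({"mode": "AUTO"})}),  # struct as a JSON string
]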
{nominal-1.110.0 → nominal-1.111.1}/nominal/core/datasource.py

@@ -18,7 +18,6 @@ from nominal_api import (
     storage_writer_api,
     timeseries_channelmetadata,
     timeseries_channelmetadata_api,
-    timeseries_logicalseries,
     timeseries_metadata,
     timeseries_metadata_api,
     upload_api,
@@ -53,8 +52,6 @@ class DataSource(HasRid):
         @property
         def datasource(self) -> scout_datasource.DataSourceService: ...
         @property
-        def logical_series(self) -> timeseries_logicalseries.LogicalSeriesService: ...
-        @property
         def units(self) -> scout.UnitsService: ...
         @property
         def ingest(self) -> ingest_api.IngestService: ...
{nominal-1.110.0 → nominal-1.111.1}/nominal/experimental/migration/migration_utils.py

@@ -51,6 +51,56 @@ logger = logging.getLogger(__name__)
 
 ConjureType = Union[ConjureBeanType, ConjureUnionType, ConjureEnumType]
 
+
+def _install_migration_file_logger(
+    log_path: str | Path | None = None,
+    *,
+    logger: logging.Logger | None = None,
+    level: int = logging.INFO,
+    formatter: logging.Formatter | None = None,
+    mode: str = "a",
+) -> logging.FileHandler:
+    """Install a file handler that only writes log records with extra={"to_file": True}.
+
+    Args:
+        log_path: File path to write filtered logs to. If None (or a directory), a timestamped
+            file named "migration_utils_output_YYYY-MM-DD-HH-MM-SS.txt" is created.
+        logger: Logger to attach the handler to. Defaults to the root logger.
+        level: Minimum log level to write to the file.
+        formatter: Optional formatter to apply to the file handler.
+        mode: File open mode for the handler.
+
+    Returns:
+        The attached FileHandler instance.
+    """
+    if logger is None:
+        logger = logging.getLogger()
+
+    if log_path is None:
+        log_path_obj = Path.cwd()
+    else:
+        log_path_obj = Path(log_path)
+
+    if log_path_obj.is_dir():
+        timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
+        log_path_obj = log_path_obj / f"migration_utils_output_{timestamp}.txt"
+
+    handler = logging.FileHandler(log_path_obj, mode=mode, encoding="utf-8")
+    handler.setLevel(level)
+    if formatter is not None:
+        handler.setFormatter(formatter)
+
+    filter_obj = logging.Filter()
+
+    def _filter(record: logging.LogRecord) -> bool:
+        return bool(getattr(record, "to_file", False))
+
+    filter_obj.filter = _filter  # type: ignore[method-assign]
+    handler.addFilter(filter_obj)
+    logger.addHandler(handler)
+    return handler
+
+
 # Regex pattern to match strings that have a UUID format with a prefix.
 UUID_PATTERN = re.compile(r"^(.*)([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$")
 
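The handler forwards only records flagged with extra={"to_file": True}, which is how the RID-mapping lines in the hunks below reach the output file while ordinary log lines stay on the console. A small demonstration of the same mechanism; the helper is private, so importing it directly is purely illustrative, and the path name is arbitrary:

import logging

from nominal.experimental.migration.migration_utils import _install_migration_file_logger

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)

handler = _install_migration_file_logger("rid_mapping.txt")  # arbitrary path
try:
    log.info("console only")  # no to_file flag: skipped by the file handler
    log.info("DATASET: Old RID: %s, New RID: %s", "old", "new", extra={"to_file": True})
finally:
    handler.close()
    logging.getLogger().removeHandler(handler)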
@@ -368,6 +418,12 @@ def copy_workbook_template_from(
         source_template.rid,
         extra=log_extras,
     )
+    logger.info(
+        "WORKBOOK_TEMPLATE: Old RID: %s, New RID: %s",
+        source_template.rid,
+        new_workbook_template.rid,
+        extra={"to_file": True},
+    )
     return new_workbook_template
 
 
@@ -432,6 +488,12 @@ def copy_video_file_to_video_dataset(
         destination_video_dataset.name,
         destination_video_dataset.rid,
     )
+    logger.info(
+        "VIDEO_FILE: Old RID: %s, New RID: %s",
+        source_video_file.rid,
+        new_file.rid,
+        extra={"to_file": True},
+    )
     return new_file
 
 
@@ -502,6 +564,12 @@ def copy_video_from(
         new_video.rid,
         extra=log_extras,
     )
+    logger.info(
+        "VIDEO: Old RID: %s, New RID: %s",
+        source_video.rid,
+        new_video.rid,
+        extra={"to_file": True},
+    )
     return new_video
 
 
@@ -552,6 +620,12 @@ def copy_file_to_dataset(
             destination_dataset.name,
             destination_dataset.rid,
         )
+        logger.info(
+            "DATASET_FILE: Old RID: %s, New RID: %s",
+            source_file.id,
+            new_file.id,
+            extra={"to_file": True},
+        )
         return new_file
     else:  # Because these fields are optional, need to check for None. We shouldn't ever run into this.
         raise ValueError("Unsupported file handle type or missing timestamp information.")
@@ -658,12 +732,26 @@ def copy_dataset_from(
     if include_files:
         for source_file in source_dataset.list_files():
             copy_file_to_dataset(source_file, new_dataset)
+
+    # Copy bounds from source dataset if they exist
+    if source_dataset.bounds is not None:
+        new_dataset = new_dataset.update_bounds(
+            start=source_dataset.bounds.start,
+            end=source_dataset.bounds.end,
+        )
+
     logger.debug(
         "New dataset created: %s (rid: %s)",
         new_dataset.name,
         new_dataset.rid,
         extra=log_extras,
     )
+    logger.info(
+        "DATASET: Old RID: %s, New RID: %s",
+        source_dataset.rid,
+        new_dataset.rid,
+        extra={"to_file": True},
+    )
     return new_dataset
 
 
@@ -735,6 +823,12 @@ def copy_event_from(
         new_event.rid,
         extra=log_extras,
     )
+    logger.info(
+        "EVENT: Old RID: %s, New RID: %s",
+        source_event.rid,
+        new_event.rid,
+        extra={"to_file": True},
+    )
     return new_event
 
 
@@ -792,6 +886,7 @@ def copy_run_from(
         source_run.rid,
         extra=log_extras,
     )
+
     new_run = destination_client.create_run(
         name=new_name or source_run.name,
         start=new_start or source_run.start,
@@ -804,6 +899,12 @@ def copy_run_from(
         attachments=new_attachments or source_run.list_attachments(),
     )
     logger.debug("New run created: %s (rid: %s)", new_run.name, new_run.rid, extra=log_extras)
+    logger.info(
+        "RUN: Old RID: %s, New RID: %s",
+        source_run.rid,
+        new_run.rid,
+        extra={"to_file": True},
+    )
     return new_run
 
 
@@ -863,6 +964,7 @@ def copy_asset_from(
     log_extras = {
         "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
     }
+
     logger.debug(
         "Copying asset %s (rid: %s)",
         source_asset.name,
@@ -875,6 +977,7 @@ def copy_asset_from(
         properties=new_asset_properties if new_asset_properties is not None else source_asset.properties,
         labels=new_asset_labels if new_asset_labels is not None else source_asset.labels,
     )
+
     if dataset_config is not None:
         source_datasets = source_asset.list_datasets()
         for data_scope, source_dataset in source_datasets:
@@ -909,6 +1012,12 @@ def copy_asset_from(
             copy_video_file_to_video_dataset(source_video_file, new_video_dataset)
 
     logger.debug("New asset created: %s (rid: %s)", new_asset, new_asset.rid, extra=log_extras)
+    logger.info(
+        "ASSET: Old RID: %s, New RID: %s",
+        source_asset.rid,
+        new_asset.rid,
+        extra={"to_file": True},
+    )
     return new_asset
 
 
@@ -928,41 +1037,47 @@ def copy_resources_to_destination_client(
     Returns:
         All of the created resources.
     """
-    log_extras = {
-        "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid,
-    }
-
-    new_assets = []
-    new_templates = []
-    new_workbooks = []
-
-    new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
-    for source_asset in migration_resources.source_assets:
-        new_asset = copy_asset_from(
-            source_asset.asset,
-            destination_client,
-            dataset_config=dataset_config,
-            include_events=True,
-            include_runs=True,
-            include_video=True,
-        )
-        new_assets.append(new_asset)
-        new_data_scopes_and_datasets.extend(new_asset.list_datasets())
-
-        for source_workbook_template in source_asset.source_workbook_templates:
-            new_template = clone_workbook_template(source_workbook_template, destination_client)
-            new_templates.append(new_template)
-            new_workbook = new_template.create_workbook(
-                title=new_template.title, description=new_template.description, asset=new_asset
-            )
-            logger.debug(
-                "Created new workbook %s (rid: %s) from template %s (rid: %s)",
-                new_workbook.title,
-                new_workbook.rid,
-                new_template.title,
-                new_template.rid,
-                extra=log_extras,
+    file_handler = _install_migration_file_logger()
+    try:
+        log_extras = {
+            "destination_client_workspace": destination_client.get_workspace(
+                destination_client._clients.workspace_rid
+            ).rid,
+        }
+
+        new_assets = []
+        new_templates = []
+        new_workbooks = []
+
+        new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
+        for source_asset in migration_resources.source_assets:
+            new_asset = copy_asset_from(
+                source_asset.asset,
+                destination_client,
+                dataset_config=dataset_config,
+                include_events=True,
+                include_runs=True,
+                include_video=True,
             )
-            new_workbooks.append(new_workbook)
-
+            new_assets.append(new_asset)
+            new_data_scopes_and_datasets.extend(new_asset.list_datasets())
+
+            for source_workbook_template in source_asset.source_workbook_templates:
+                new_template = clone_workbook_template(source_workbook_template, destination_client)
+                new_templates.append(new_template)
+                new_workbook = new_template.create_workbook(
+                    title=new_template.title, description=new_template.description, asset=new_asset
+                )
+                logger.debug(
+                    "Created new workbook %s (rid: %s) from template %s (rid: %s)",
+                    new_workbook.title,
+                    new_workbook.rid,
+                    new_template.title,
+                    new_template.rid,
+                    extra=log_extras,
+                )
+                new_workbooks.append(new_workbook)
+    finally:
+        file_handler.close()
+        logger.removeHandler(file_handler)
     return (new_data_scopes_and_datasets, new_assets, new_templates, new_workbooks)
{nominal-1.110.0 → nominal-1.111.1}/pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "nominal"
-version = "1.110.0"
+version = "1.111.1"
 description = "Automate Nominal workflows in Python"
 authors = [
     { name = "Alexander Reynolds", email = "alex.reynolds@nominal.io" },
@@ -150,6 +150,9 @@ ignore = [
     "PLW0603", # globals
 ]
 
+[tool.ruff.lint.per-file-ignores]
+"nominal/core/dataset.py" = ["E501"]  # allow long lines for avro schema in docstring
+
 [tool.mypy]
 strict = true
 packages = ["nominal"]