nominal 1.102.0.tar.gz → 1.104.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. {nominal-1.102.0 → nominal-1.104.0}/.gitignore +7 -0
  2. {nominal-1.102.0 → nominal-1.104.0}/CHANGELOG.md +15 -0
  3. {nominal-1.102.0 → nominal-1.104.0}/PKG-INFO +1 -1
  4. nominal-1.104.0/nominal/core/_types.py +6 -0
  5. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/attachment.py +3 -1
  6. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/client.py +3 -2
  7. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/connection.py +3 -3
  8. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/dataset.py +18 -17
  9. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/dataset_file.py +5 -2
  10. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/datasource.py +4 -4
  11. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/filetype.py +7 -5
  12. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/video.py +3 -2
  13. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/migration/migration_utils.py +86 -6
  14. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/rust_streaming/rust_write_stream.py +3 -2
  15. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/video_processing/video_conversion.py +5 -2
  16. {nominal-1.102.0 → nominal-1.104.0}/nominal/nominal.py +9 -8
  17. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/tdms/_tdms.py +4 -3
  18. {nominal-1.102.0 → nominal-1.104.0}/pyproject.toml +1 -1
  19. {nominal-1.102.0 → nominal-1.104.0}/LICENSE +0 -0
  20. {nominal-1.102.0 → nominal-1.104.0}/README.md +0 -0
  21. {nominal-1.102.0 → nominal-1.104.0}/nominal/__init__.py +0 -0
  22. {nominal-1.102.0 → nominal-1.104.0}/nominal/__main__.py +0 -0
  23. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/README.md +0 -0
  24. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/__init__.py +0 -0
  25. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/dataclass_tools.py +0 -0
  26. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/deprecation_tools.py +0 -0
  27. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/iterator_tools.py +0 -0
  28. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/streaming_tools.py +0 -0
  29. {nominal-1.102.0 → nominal-1.104.0}/nominal/_utils/timing_tools.py +0 -0
  30. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/__init__.py +0 -0
  31. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/__main__.py +0 -0
  32. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/attachment.py +0 -0
  33. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/auth.py +0 -0
  34. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/config.py +0 -0
  35. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/dataset.py +0 -0
  36. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/download.py +0 -0
  37. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/mis.py +0 -0
  38. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/run.py +0 -0
  39. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/util/__init__.py +0 -0
  40. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/util/click_log_handler.py +0 -0
  41. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/util/global_decorators.py +0 -0
  42. {nominal-1.102.0 → nominal-1.104.0}/nominal/cli/util/verify_connection.py +0 -0
  43. {nominal-1.102.0 → nominal-1.104.0}/nominal/config/__init__.py +0 -0
  44. {nominal-1.102.0 → nominal-1.104.0}/nominal/config/_config.py +0 -0
  45. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/__init__.py +0 -0
  46. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_clientsbunch.py +0 -0
  47. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_constants.py +0 -0
  48. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_event_types.py +0 -0
  49. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_stream/__init__.py +0 -0
  50. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_stream/batch_processor.py +0 -0
  51. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_stream/batch_processor_proto.py +0 -0
  52. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_stream/write_stream.py +0 -0
  53. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_stream/write_stream_base.py +0 -0
  54. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/README.md +0 -0
  55. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/__init__.py +0 -0
  56. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/api_tools.py +0 -0
  57. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/multipart.py +0 -0
  58. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/multipart_downloader.py +0 -0
  59. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/networking.py +0 -0
  60. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/pagination_tools.py +0 -0
  61. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/query_tools.py +0 -0
  62. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/_utils/queueing.py +0 -0
  63. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/asset.py +0 -0
  64. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/bounds.py +0 -0
  65. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/channel.py +0 -0
  66. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/checklist.py +0 -0
  67. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/containerized_extractors.py +0 -0
  68. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/data_review.py +0 -0
  69. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/event.py +0 -0
  70. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/exceptions.py +0 -0
  71. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/log.py +0 -0
  72. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/run.py +0 -0
  73. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/secret.py +0 -0
  74. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/unit.py +0 -0
  75. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/user.py +0 -0
  76. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/video_file.py +0 -0
  77. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/workbook.py +0 -0
  78. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/workbook_template.py +0 -0
  79. {nominal-1.102.0 → nominal-1.104.0}/nominal/core/workspace.py +0 -0
  80. {nominal-1.102.0 → nominal-1.104.0}/nominal/exceptions/__init__.py +0 -0
  81. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/__init__.py +0 -0
  82. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/README.md +0 -0
  83. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/__init__.py +0 -0
  84. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/_buckets.py +0 -0
  85. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/__init__.py +0 -0
  86. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/_enum_expr_impls.py +0 -0
  87. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/_numeric_expr_impls.py +0 -0
  88. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/_range_expr_impls.py +0 -0
  89. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/exprs.py +0 -0
  90. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/compute/dsl/params.py +0 -0
  91. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/logging/__init__.py +0 -0
  92. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/logging/click_log_handler.py +0 -0
  93. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/logging/nominal_log_handler.py +0 -0
  94. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/logging/rich_log_handler.py +0 -0
  95. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/migration/__init__.py +0 -0
  96. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/rust_streaming/__init__.py +0 -0
  97. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/stream_v2/__init__.py +0 -0
  98. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/stream_v2/_serializer.py +0 -0
  99. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/stream_v2/_write_stream.py +0 -0
  100. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/video_processing/__init__.py +0 -0
  101. {nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/video_processing/resolution.py +0 -0
  102. {nominal-1.102.0 → nominal-1.104.0}/nominal/py.typed +0 -0
  103. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/__init__.py +0 -0
  104. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/matlab/__init__.py +0 -0
  105. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/matlab/_matlab.py +0 -0
  106. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/pandas/__init__.py +0 -0
  107. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/pandas/_pandas.py +0 -0
  108. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/polars/__init__.py +0 -0
  109. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/polars/polars_export_handler.py +0 -0
  110. {nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/tdms/__init__.py +0 -0
  111. {nominal-1.102.0 → nominal-1.104.0}/nominal/ts/__init__.py +0 -0

{nominal-1.102.0 → nominal-1.104.0}/.gitignore
@@ -170,3 +170,10 @@ playground/
  # Backup files
  *~
  \#*
+
+ # Don't check in local uv.toml settings
+ uv.toml
+
+ # Don't check in data
+ *.csv
+ *.parquet

{nominal-1.102.0 → nominal-1.104.0}/CHANGELOG.md
@@ -1,5 +1,20 @@
  # Changelog

+ ## [1.104.0](https://github.com/nominal-io/nominal-client/compare/v1.103.0...v1.104.0) (2026-01-08)
+
+
+ ### Features
+
+ * properly use timestamp metadata fallback containerized ([#568](https://github.com/nominal-io/nominal-client/issues/568)) ([8aa5c8c](https://github.com/nominal-io/nominal-client/commit/8aa5c8cb39eeee54bb4b643ad087b80ac5e12f1a))
+
+ ## [1.103.0](https://github.com/nominal-io/nominal-client/compare/v1.102.0...v1.103.0) (2026-01-06)
+
+
+ ### Features
+
+ * add clone/copy run ([#563](https://github.com/nominal-io/nominal-client/issues/563)) ([276c511](https://github.com/nominal-io/nominal-client/commit/276c511d10c73b9f80ea2efed32d4047fe33795b))
+ * universally support paths and strings, and add PathLike alias ([#566](https://github.com/nominal-io/nominal-client/issues/566)) ([860dd3d](https://github.com/nominal-io/nominal-client/commit/860dd3d67456d269356b66b5434b904647e5a2ff))
+
  ## [1.102.0](https://github.com/nominal-io/nominal-client/compare/v1.101.0...v1.102.0) (2026-01-05)



{nominal-1.102.0 → nominal-1.104.0}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: nominal
- Version: 1.102.0
+ Version: 1.104.0
  Summary: Automate Nominal workflows in Python
  Project-URL: Homepage, https://nominal.io
  Project-URL: Documentation, https://docs.nominal.io

nominal-1.104.0/nominal/core/_types.py
@@ -0,0 +1,6 @@
+ from __future__ import annotations
+
+ import pathlib
+ from typing import TypeAlias
+
+ PathLike: TypeAlias = pathlib.Path | str
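
The new module is a one-line alias: PathLike unions pathlib.Path and str, and the signatures touched in this release accept either form and normalize with pathlib.Path(...) on entry. A minimal sketch of the same pattern in caller code (the helper name below is illustrative, not part of the package):

from __future__ import annotations

import pathlib

from nominal.core._types import PathLike


def read_bytes(path: PathLike) -> bytes:
    # Accept a str or a pathlib.Path and normalize immediately,
    # mirroring the convention used throughout 1.103.0/1.104.0.
    return pathlib.Path(path).read_bytes()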

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/attachment.py
@@ -10,6 +10,7 @@ from nominal_api import attachments_api
  from typing_extensions import Self

  from nominal.core._clientsbunch import HasScoutParams
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import HasRid, RefreshableMixin
  from nominal.ts import IntegralNanosecondsUTC, _SecondsNanos

@@ -69,11 +70,12 @@ class Attachment(HasRid, RefreshableMixin[attachments_api.Attachment]):
  # this acts like a file-like object in binary-mode.
  return cast(BinaryIO, response)

- def write(self, path: Path, mkdir: bool = True) -> None:
+ def write(self, path: PathLike, mkdir: bool = True) -> None:
  """Write an attachment to the filesystem.

  `path` should be the path you want to save to, i.e. a file, not a directory.
  """
+ path = Path(path)
  if mkdir:
  path.parent.mkdir(exist_ok=True, parents=True)
  with open(path, "wb") as wf:
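
With the widened signature, Attachment.write now also takes a plain string destination and converts it to a pathlib.Path internally. A brief sketch, assuming attachment is an existing nominal.core.attachment.Attachment handle and the file name is illustrative:

import pathlib

# attachment: nominal.core.attachment.Attachment, obtained elsewhere (e.g. via a client lookup)
attachment.write("downloads/report.pdf")                # str accepted as of this release
attachment.write(pathlib.Path("downloads/report.pdf"))  # pathlib.Path still works as before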

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/client.py
@@ -37,6 +37,7 @@ from nominal.config import NominalConfig, _config
  from nominal.core._clientsbunch import ClientsBunch
  from nominal.core._constants import DEFAULT_API_BASE_URL
  from nominal.core._event_types import EventType
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import (
  Link,
  LinkDict,
@@ -853,7 +854,7 @@ class NominalClient:

  def create_attachment(
  self,
- attachment_file: Path | str,
+ attachment_file: PathLike,
  *,
  description: str | None = None,
  properties: Mapping[str, str] | None = None,
@@ -965,7 +966,7 @@
  )
  def create_video_from_mcap(
  self,
- path: Path | str,
+ path: PathLike,
  topic: str,
  name: str | None = None,
  description: str | None = None,

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/connection.py
@@ -1,6 +1,5 @@
  from __future__ import annotations

- import pathlib
  from dataclasses import dataclass
  from datetime import timedelta
  from typing import Literal, Sequence, overload
@@ -8,6 +7,7 @@ from typing import Literal, Sequence, overload
  from nominal_api import scout_datasource_connection_api

  from nominal.core._stream.write_stream import DataStream
+ from nominal.core._types import PathLike
  from nominal.core.datasource import DataSource, _get_write_stream


@@ -71,7 +71,7 @@ class StreamingConnection(Connection):
  batch_size: int = 50_000,
  max_wait: timedelta = timedelta(seconds=1),
  data_format: Literal["rust_experimental"] | None = None,
- file_fallback: pathlib.Path | None = None,
+ file_fallback: PathLike | None = None,
  log_level: str | None = None,
  num_workers: int | None = None,
  ) -> DataStream: ...
@@ -80,7 +80,7 @@ class StreamingConnection(Connection):
  batch_size: int = 50_000,
  max_wait: timedelta = timedelta(seconds=1),
  data_format: Literal["json", "protobuf", "experimental", "rust_experimental"] | None = None,
- file_fallback: pathlib.Path | None = None,
+ file_fallback: PathLike | None = None,
  log_level: str | None = None,
  num_workers: int | None = None,
  ) -> DataStream:

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/dataset.py
@@ -14,6 +14,7 @@ from typing_extensions import Self, deprecated

  from nominal.core._stream.batch_processor import process_log_batch
  from nominal.core._stream.write_stream import LogStream, WriteStream
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import RefreshableMixin
  from nominal.core._utils.multipart import path_upload_name, upload_multipart_file, upload_multipart_io
  from nominal.core.bounds import Bounds
@@ -112,7 +113,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):

  def add_tabular_data(
  self,
- path: Path | str,
+ path: PathLike,
  timestamp_column: str,
  timestamp_type: _AnyTimestampType,
  tag_columns: Mapping[str, str] | None = None,
@@ -206,7 +207,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):

  def add_avro_stream(
  self,
- path: Path | str,
+ path: PathLike,
  ) -> DatasetFile:
  """Upload an avro stream file with a specific schema, described below.

@@ -278,7 +279,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):

  def add_journal_json(
  self,
- path: Path | str,
+ path: PathLike,
  ) -> DatasetFile:
  """Add a journald jsonl file to an existing dataset."""
  log_path = Path(path)
@@ -310,7 +311,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):

  def add_mcap(
  self,
- path: Path | str,
+ path: PathLike,
  include_topics: Iterable[str] | None = None,
  exclude_topics: Iterable[str] | None = None,
  ) -> DatasetFile:
@@ -384,7 +385,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):

  def add_ardupilot_dataflash(
  self,
- path: Path | str,
+ path: PathLike,
  tags: Mapping[str, str] | None = None,
  ) -> DatasetFile:
  """Add a Dataflash file to an existing dataset.
@@ -415,7 +416,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
  def add_containerized(
  self,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  tag: str | None = None,
  *,
  arguments: Mapping[str, str] | None = None,
@@ -425,7 +426,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
  def add_containerized(
  self,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  tag: str | None = None,
  *,
  arguments: Mapping[str, str] | None = None,
@@ -436,7 +437,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
  def add_containerized(
  self,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  tag: str | None = None,
  *,
  arguments: Mapping[str, str] | None = None,
@@ -467,7 +468,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
  series_name=timestamp_column,
  timestamp_type=_to_typed_timestamp_type(timestamp_type)._to_conjure_ingest_api(),
  )
- elif None in (timestamp_column, timestamp_type):
+ elif (timestamp_column is None) != (timestamp_type is None):
  raise ValueError("Only one of `timestamp_column` and `timestamp_type` provided!")

  if isinstance(extractor, str):
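
The rewritten timestamp guard (in the containerized-extractor path, judging by the surrounding context) is a true exclusive-or: it now raises only when exactly one of timestamp_column and timestamp_type is supplied, whereas the old membership test also fired when both were omitted, which is a valid combination. A standalone illustration of the two predicates:

for col, typ in [(None, None), ("ts", None), (None, "epoch"), ("ts", "epoch")]:
    old = None in (col, typ)              # True in 3 of 4 cases, including (None, None)
    new = (col is None) != (typ is None)  # True only when exactly one argument is missing
    print(col, typ, old, new)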

@@ -708,7 +709,7 @@ class _DatasetWrapper(abc.ABC):
  def add_tabular_data(
  self,
  data_scope_name: str,
- path: Path | str,
+ path: PathLike,
  *,
  timestamp_column: str,
  timestamp_type: _AnyTimestampType,
@@ -735,7 +736,7 @@ class _DatasetWrapper(abc.ABC):
  def add_avro_stream(
  self,
  data_scope_name: str,
- path: Path | str,
+ path: PathLike,
  ) -> DatasetFile:
  """Upload an avro stream file to the dataset selected by `data_scope_name`.

@@ -761,7 +762,7 @@ class _DatasetWrapper(abc.ABC):
  def add_journal_json(
  self,
  data_scope_name: str,
- path: Path | str,
+ path: PathLike,
  ) -> DatasetFile:
  """Add a journald json file to the dataset selected by `data_scope_name`.

@@ -787,7 +788,7 @@ class _DatasetWrapper(abc.ABC):
  def add_mcap(
  self,
  data_scope_name: str,
- path: Path | str,
+ path: PathLike,
  *,
  include_topics: Iterable[str] | None = None,
  exclude_topics: Iterable[str] | None = None,
@@ -815,7 +816,7 @@ class _DatasetWrapper(abc.ABC):
  def add_ardupilot_dataflash(
  self,
  data_scope_name: str,
- path: Path | str,
+ path: PathLike,
  tags: Mapping[str, str] | None = None,
  ) -> DatasetFile:
  """Add a Dataflash file to the dataset selected by `data_scope_name`.
@@ -835,7 +836,7 @@ class _DatasetWrapper(abc.ABC):
  self,
  data_scope_name: str,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  *,
  tag: str | None = None,
  tags: Mapping[str, str] | None = None,
@@ -845,7 +846,7 @@ class _DatasetWrapper(abc.ABC):
  self,
  data_scope_name: str,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  *,
  tag: str | None = None,
  tags: Mapping[str, str] | None = None,
@@ -856,7 +857,7 @@ class _DatasetWrapper(abc.ABC):
  self,
  data_scope_name: str,
  extractor: str | ContainerizedExtractor,
- sources: Mapping[str, Path | str],
+ sources: Mapping[str, PathLike],
  *,
  tag: str | None = None,
  tags: Mapping[str, str] | None = None,

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/dataset_file.py
@@ -13,6 +13,7 @@ from nominal_api import api, ingest_api, scout_catalog
  from typing_extensions import Self

  from nominal.core._clientsbunch import HasScoutParams
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import RefreshableMixin
  from nominal.core._utils.multipart import DEFAULT_CHUNK_SIZE
  from nominal.core._utils.multipart_downloader import (
@@ -128,7 +129,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):

  def download(
  self,
- output_directory: pathlib.Path,
+ output_directory: PathLike,
  *,
  part_size: int = DEFAULT_CHUNK_SIZE,
  num_retries: int = 3,
@@ -148,6 +149,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
  FileExistsError: File already exists at destination
  RuntimeError: Error downloading file
  """
+ output_directory = pathlib.Path(output_directory)
  if output_directory.exists() and not output_directory.is_dir():
  raise NotADirectoryError(f"Output directory is not a directory: {output_directory}")

@@ -160,7 +162,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):

  def download_original_files(
  self,
- output_directory: pathlib.Path,
+ output_directory: PathLike,
  *,
  part_size: int = DEFAULT_CHUNK_SIZE,
  num_retries: int = 3,
@@ -184,6 +186,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
  NOTE: any file that fails to download will result in an error log and will not be returned
  as an output path
  """
+ output_directory = pathlib.Path(output_directory)
  if output_directory.exists() and not output_directory.is_dir():
  raise NotADirectoryError(f"Output directory is not a directory: {output_directory}")

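
Both download helpers now coerce output_directory up front, so a string works the same as a pathlib.Path. A brief sketch, assuming dataset_file is an existing nominal.core.dataset_file.DatasetFile handle and the directory names are illustrative:

# dataset_file: nominal.core.dataset_file.DatasetFile, obtained elsewhere
dataset_file.download("exports")                     # str accepted; coerced via pathlib.Path
dataset_file.download_original_files("exports/raw")  # same for the original-files variant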

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/datasource.py
@@ -1,7 +1,6 @@
  from __future__ import annotations

  import logging
- import pathlib
  from dataclasses import dataclass, field
  from datetime import timedelta
  from typing import Iterable, Literal, Mapping, Protocol, Sequence, overload
@@ -27,6 +26,7 @@ from nominal._utils import batched
  from nominal.core._clientsbunch import HasScoutParams, ProtoWriteService
  from nominal.core._stream.batch_processor import process_batch_legacy
  from nominal.core._stream.write_stream import DataStream, WriteStream
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import HasRid
  from nominal.core.channel import Channel, ChannelDataType
  from nominal.core.unit import UnitMapping, _build_unit_update, _error_on_invalid_units
@@ -125,7 +125,7 @@ class DataSource(HasRid):
  batch_size: int = 50_000,
  max_wait: timedelta = timedelta(seconds=1),
  data_format: Literal["rust_experimental"] | None = None,
- file_fallback: pathlib.Path | None = None,
+ file_fallback: PathLike | None = None,
  log_level: str | None = None,
  num_workers: int | None = None,
  ) -> DataStream: ...
@@ -134,7 +134,7 @@
  batch_size: int = 50_000,
  max_wait: timedelta = timedelta(seconds=1),
  data_format: Literal["json", "protobuf", "experimental", "rust_experimental"] | None = None,
- file_fallback: pathlib.Path | None = None,
+ file_fallback: PathLike | None = None,
  log_level: str | None = None,
  num_workers: int | None = None,
  ) -> DataStream:
@@ -327,7 +327,7 @@ def _get_write_stream(
  batch_size: int,
  max_wait: timedelta,
  data_format: Literal["json", "protobuf", "experimental", "rust_experimental"] | None,
- file_fallback: pathlib.Path | None,
+ file_fallback: PathLike | None,
  log_level: str | None,
  num_workers: int | None,
  write_rid: str,

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/filetype.py
@@ -5,6 +5,8 @@ import mimetypes
  from pathlib import Path
  from typing import NamedTuple

+ from nominal.core._types import PathLike
+
  logger = logging.getLogger(__name__)


@@ -13,7 +15,7 @@ class FileType(NamedTuple):
  mimetype: str

  @classmethod
- def from_path(cls, path: Path | str, default_mimetype: str = "application/octect-stream") -> FileType:
+ def from_path(cls, path: PathLike, default_mimetype: str = "application/octect-stream") -> FileType:
  path = Path(path)

  # Note: not using path.suffix because this fails for files with multiple suffixes,
@@ -70,7 +72,7 @@ class FileType(NamedTuple):
  return self in FileTypes._VIDEO_TYPES

  @classmethod
- def from_path_dataset(cls, path: Path | str) -> FileType:
+ def from_path_dataset(cls, path: PathLike) -> FileType:
  file_type = cls.from_path(path)
  if not file_type.is_parquet_file() and not file_type.is_csv():
  allowed_extensions = (*FileTypes._PARQUET_FILE_TYPES, *FileTypes._CSV_TYPES)
@@ -79,7 +81,7 @@ class FileType(NamedTuple):
  return file_type

  @classmethod
- def from_tabular(cls, path: Path | str) -> FileType:
+ def from_tabular(cls, path: PathLike) -> FileType:
  file_type = cls.from_path(path)
  if not file_type.is_csv() and not file_type.is_parquet():
  allowed_extensions = (
@@ -92,7 +94,7 @@ class FileType(NamedTuple):
  return file_type

  @classmethod
- def from_path_journal_json(cls, path: Path | str) -> FileType:
+ def from_path_journal_json(cls, path: PathLike) -> FileType:
  file_type = cls.from_path(path)
  if not file_type.is_journal():
  raise ValueError(
@@ -102,7 +104,7 @@ class FileType(NamedTuple):
  return file_type

  @classmethod
- def from_video(cls, path: Path | str) -> FileType:
+ def from_video(cls, path: PathLike) -> FileType:
  file_type = cls.from_path(path)
  if not file_type.is_video():
  raise ValueError(f"video path '{path}' must end in one of {[f.extension for f in FileTypes._VIDEO_TYPES]}")
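
The FileType classmethods were already normalizing with Path(path), so this hunk is purely a type-hint widening. A small sketch (the file name is illustrative):

from nominal.core.filetype import FileType

file_type = FileType.from_path("telemetry/run_001.csv")  # str accepted
print(file_type.extension, file_type.mimetype)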

{nominal-1.102.0 → nominal-1.104.0}/nominal/core/video.py
@@ -14,6 +14,7 @@ from nominal_api import api, ingest_api, scout_video, scout_video_api, upload_ap
  from typing_extensions import Self

  from nominal.core._clientsbunch import HasScoutParams
+ from nominal.core._types import PathLike
  from nominal.core._utils.api_tools import HasRid, RefreshableMixin
  from nominal.core._utils.multipart import path_upload_name, upload_multipart_io
  from nominal.core.exceptions import NominalIngestError, NominalIngestFailed
@@ -118,7 +119,7 @@ class Video(HasRid, RefreshableMixin[scout_video_api.Video]):

  def add_file(
  self,
- path: pathlib.Path | str,
+ path: PathLike,
  start: datetime | IntegralNanosecondsUTC | None = None,
  frame_timestamps: Sequence[IntegralNanosecondsUTC] | None = None,
  description: str | None = None,
@@ -212,7 +213,7 @@ class Video(HasRid, RefreshableMixin[scout_video_api.Video]):

  def add_mcap(
  self,
- path: pathlib.Path,
+ path: PathLike,
  topic: str,
  description: str | None = None,
  ) -> VideoFile:
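
Video.add_file and Video.add_mcap follow the same pattern: add_file already accepted str, and add_mcap is newly widened from pathlib.Path only. A hedged sketch, assuming video is an existing nominal.core.video.Video handle, the file name is illustrative, and any parameters not visible in this hunk are left at their defaults:

# video: nominal.core.video.Video, obtained elsewhere
video.add_file("clips/front_camera.mp4")  # str or pathlib.Path; start/frame_timestamps/description optional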

{nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/migration/migration_utils.py
@@ -23,6 +23,9 @@ from nominal.core import (
  WorkbookTemplate,
  )
  from nominal.core._event_types import EventType, SearchEventOriginType
+ from nominal.core._utils.api_tools import Link, LinkDict
+ from nominal.core.attachment import Attachment
+ from nominal.core.run import Run
  from nominal.ts import (
  IntegralNanosecondsDuration,
  IntegralNanosecondsUTC,
@@ -523,6 +526,75 @@ def copy_event_from(
  return new_event


+ def clone_run(source_run: Run, destination_client: NominalClient) -> Run:
+ """Clones a run, maintaining all properties, linked assets, and attachments.
+
+ Args:
+ source_run: The run to copy from.
+ destination_client: The destination client.
+
+ Returns:
+ The cloned run.
+ """
+ return copy_run_from(source_run=source_run, destination_client=destination_client)
+
+
+ def copy_run_from(
+ source_run: Run,
+ destination_client: NominalClient,
+ *,
+ new_name: str | None = None,
+ new_start: datetime | IntegralNanosecondsUTC | None = None,
+ new_end: datetime | IntegralNanosecondsUTC | None = None,
+ new_description: str | None = None,
+ new_properties: Mapping[str, str] | None = None,
+ new_labels: Sequence[str] = (),
+ new_links: Sequence[str | Link | LinkDict] = (),
+ new_attachments: Iterable[Attachment] | Iterable[str] = (),
+ new_assets: Sequence[Asset | str] = (),
+ ) -> Run:
+ """Copy a run from the source to the destination client.
+
+ Args:
+ source_run: The source Run to copy.
+ destination_client: The NominalClient to create the copied run in.
+ new_name: Optionally override the name of the copied run. Defaults to original name.
+ new_start: Optionally override the start time of the copied run. Defaults to original start time.
+ new_end: Optionally override the end time of the copied run. Defaults to original end time.
+ new_description: Optionally override the description of the copied run. Defaults to original description.
+ new_properties: Optionally override the properties of the copied run. Defaults to original properties.
+ new_labels: Optionally override the labels of the copied run. Defaults to original labels.
+ new_links: Optionally override the links of the copied run. Defaults to original links.
+ new_attachments: Optionally override the attachments of the copied run. Defaults to original attachments.
+ new_assets: Optionally override the linked assets of the copied run. Defaults to original linked assets.
+
+ Returns:
+ The newly created Run in the destination client.
+ """
+ log_extras = {
+ "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid
+ }
+ logger.debug(
+ "Copying run %s (rid: %s)",
+ source_run.name,
+ source_run.rid,
+ extra=log_extras,
+ )
+ new_run = destination_client.create_run(
+ name=new_name or source_run.name,
+ start=new_start or source_run.start,
+ end=new_end or source_run.end,
+ description=new_description or source_run.description,
+ properties=new_properties or source_run.properties,
+ labels=new_labels or source_run.labels,
+ assets=new_assets or source_run.assets,
+ links=new_links or source_run.links,
+ attachments=new_attachments or source_run.list_attachments(),
+ )
+ logger.debug("New run created: %s (rid: %s)", new_run.name, new_run.rid, extra=log_extras)
+ return new_run
+
+
  def clone_asset(
  source_asset: Asset,
  destination_client: NominalClient,
@@ -537,7 +609,11 @@
  The newly created Asset in the target client.
  """
  return copy_asset_from(
- source_asset=source_asset, destination_client=destination_client, include_data=True, include_events=True
+ source_asset=source_asset,
+ destination_client=destination_client,
+ include_data=True,
+ include_events=True,
+ include_runs=True,
  )


@@ -551,6 +627,7 @@ def copy_asset_from(
  new_asset_labels: Sequence[str] | None = None,
  include_data: bool = False,
  include_events: bool = False,
+ include_runs: bool = False,
  ) -> Asset:
  """Copy an asset from the source to the destination client.

@@ -563,6 +640,7 @@
  new_asset_labels: Optional new labels for the copied asset. If not provided, the original labels are used.
  include_data: Whether to include data in the copied asset.
  include_events: Whether to include events in the copied dataset.
+ include_runs: Whether to include runs in the copied asset.

  Returns:
  The new asset created.
@@ -579,21 +657,23 @@
  )
  if include_data:
  source_datasets = source_asset.list_datasets()
- new_datasets = []
  for data_scope, source_dataset in source_datasets:
  new_dataset = clone_dataset(
  source_dataset=source_dataset,
  destination_client=destination_client,
  )
- new_datasets.append(new_dataset)
  new_asset.add_dataset(data_scope, new_dataset)
+ source_asset._list_dataset_scopes

  if include_events:
  source_events = source_asset.search_events(origin_types=SearchEventOriginType.get_manual_origin_types())
- new_events = []
  for source_event in source_events:
- new_event = copy_event_from(source_event, destination_client, new_assets=[new_asset])
- new_events.append(new_event)
+ copy_event_from(source_event, destination_client, new_assets=[new_asset])
+
+ if include_runs:
+ source_runs = source_asset.list_runs()
+ for source_run in source_runs:
+ copy_run_from(source_run, destination_client, new_assets=[new_asset])

  logger.debug("New asset created: %s (rid: %s)", new_asset, new_asset.rid, extra=log_extras)
  return new_asset
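
In short, clone_run is a thin wrapper over copy_run_from with no overrides, and copy_asset_from gains an include_runs flag that clone_asset now enables. A hedged sketch of the new helpers, where source_run, source_asset, and destination_client are placeholders obtained elsewhere:

from nominal.experimental.migration.migration_utils import clone_run, copy_asset_from, copy_run_from

new_run = clone_run(source_run, destination_client)  # straight copy of properties, assets, attachments
renamed_run = copy_run_from(source_run, destination_client, new_name="Rerun")  # override selected fields
new_asset = copy_asset_from(
    source_asset=source_asset,
    destination_client=destination_client,
    include_runs=True,  # runs attached to the asset are now copied as well
)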

{nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/rust_streaming/rust_write_stream.py
@@ -6,6 +6,7 @@ import pathlib
  from nominal_streaming import NominalDatasetStream

  from nominal.core._stream.write_stream import DataStream
+ from nominal.core._types import PathLike
  from nominal.core.datasource import DataSource


@@ -22,7 +23,7 @@ class RustWriteStream(NominalDatasetStream, DataStream):
  datasource_clients: DataSource._Clients,
  batch_size: int,
  max_wait: datetime.timedelta,
- file_fallback: pathlib.Path | None = None,
+ file_fallback: PathLike | None = None,
  log_level: str | None = None,
  num_workers: int | None = None,
  ) -> RustWriteStream:
@@ -41,7 +42,7 @@
  ).with_core_consumer(datasource_rid)

  if file_fallback is not None:
- stream = stream.with_file_fallback(file_fallback)
+ stream = stream.with_file_fallback(pathlib.Path(file_fallback))

  if log_level is not None:
  stream = stream.enable_logging(log_level)

{nominal-1.102.0 → nominal-1.104.0}/nominal/experimental/video_processing/video_conversion.py
@@ -6,6 +6,7 @@ import shlex

  import ffmpeg

+ from nominal.core._types import PathLike
  from nominal.experimental.video_processing.resolution import (
  AnyResolutionType,
  scale_factor_from_resolution,
@@ -20,8 +21,8 @@ DEFAULT_KEY_FRAME_INTERVAL_SEC = 2


  def normalize_video(
- input_path: pathlib.Path,
- output_path: pathlib.Path,
+ input_path: PathLike,
+ output_path: PathLike,
  key_frame_interval: int | None = DEFAULT_KEY_FRAME_INTERVAL_SEC,
  force: bool = True,
  resolution: AnyResolutionType | None = None,
@@ -59,6 +60,8 @@

  NOTE: this requires that you have installed ffmpeg on your system with support for H264.
  """
+ input_path = pathlib.Path(input_path)
+ output_path = pathlib.Path(output_path)
  assert input_path.exists(), "Input path must exist"
  assert output_path.suffix.lower() in (".mkv", ".mp4")

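
normalize_video likewise coerces both paths before its existence and suffix checks, so plain strings are fine. A sketch, assuming ffmpeg with H264 support is installed and the file names are illustrative:

from nominal.experimental.video_processing.video_conversion import normalize_video

# Both arguments may be str or pathlib.Path; they are converted with pathlib.Path(...) up front.
normalize_video("raw/front_camera.avi", "normalized/front_camera.mp4")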

{nominal-1.102.0 → nominal-1.104.0}/nominal/nominal.py
@@ -24,6 +24,7 @@ from nominal.core import (
  poll_until_ingestion_completed,
  )
  from nominal.core._constants import DEFAULT_API_BASE_URL
+ from nominal.core._types import PathLike
  from nominal.core.connection import StreamingConnection
  from nominal.core.data_review import DataReview, DataReviewBuilder

@@ -102,7 +103,7 @@ def get_user() -> User:
  "Use `nominal.thirdparty.tdms.upload_tdms` instead."
  )
  def upload_tdms(
- file: Path | str,
+ file: PathLike,
  name: str | None = None,
  description: str | None = None,
  timestamp_column: str | None = None,
@@ -213,7 +214,7 @@ def create_dataset(
  "Use `nominal.create_dataset` or `nominal.get_dataset`, add data to an existing dataset instead."
  )
  def upload_csv(
- file: Path | str,
+ file: PathLike,
  name: str | None,
  timestamp_column: str,
  timestamp_type: ts._AnyTimestampType,
@@ -245,7 +246,7 @@ def upload_csv(

  def _upload_csv(
  client: NominalClient,
- file: Path | str,
+ file: PathLike,
  name: str | None,
  timestamp_column: str,
  timestamp_type: ts._AnyTimestampType,
@@ -317,7 +318,7 @@ def create_run(
  f"see {AUTHENTICATION_DOCS_LINK}"
  )
  def create_run_csv(
- file: Path | str,
+ file: PathLike,
  name: str,
  timestamp_column: str,
  timestamp_type: ts._LiteralAbsolute | ts.Iso8601 | ts.Epoch,
@@ -379,7 +380,7 @@ def search_runs(
  f"Use `nominal.NominalClient.create_attachment` instead, see {AUTHENTICATION_DOCS_LINK}"
  )
  def upload_attachment(
- file: Path | str,
+ file: PathLike,
  name: str,
  description: str | None = None,
  ) -> Attachment:
@@ -406,7 +407,7 @@ def get_attachment(rid: str) -> Attachment:
  "Use `nominal.NominalClient.get_attachment` and `nominal.core.Attachment.write` instead, "
  f"see {AUTHENTICATION_DOCS_LINK}"
  )
- def download_attachment(rid: str, file: Path | str) -> None:
+ def download_attachment(rid: str, file: PathLike) -> None:
  """Retrieve an attachment from the Nominal platform and save it to `file`."""
  client = _get_default_client()
  attachment = client.get_attachment(rid)
@@ -418,7 +419,7 @@ def download_attachment(rid: str, file: Path | str) -> None:
  f"Use `nominal.NominalClient.create_video` instead, see {AUTHENTICATION_DOCS_LINK}"
  )
  def upload_video(
- file: Path | str, name: str, start: datetime | str | ts.IntegralNanosecondsUTC, description: str | None = None
+ file: PathLike, name: str, start: datetime | str | ts.IntegralNanosecondsUTC, description: str | None = None
  ) -> Video:
  """Upload a video to Nominal from a file."""
  client = _get_default_client()
@@ -542,7 +543,7 @@ def get_checklist(checklist_rid: str) -> Checklist:
  f"Use `nominal.NominalClient.create_mcap_video` instead, see {AUTHENTICATION_DOCS_LINK}"
  )
  def upload_mcap_video(
- file: Path | str,
+ file: PathLike,
  topic: str,
  name: str | None = None,
  description: str | None = None,

{nominal-1.102.0 → nominal-1.104.0}/nominal/thirdparty/tdms/_tdms.py
@@ -10,6 +10,7 @@ import pandas as pd
  from nptdms import TdmsChannel, TdmsFile, TdmsGroup

  from nominal import ts
+ from nominal.core._types import PathLike
  from nominal.core.client import NominalClient
  from nominal.core.dataset import Dataset
  from nominal.core.dataset_file import DatasetFile
@@ -19,7 +20,7 @@ logger = logging.getLogger(__name__)


  def _tdms_to_dataframe(
- file: Path | str,
+ file: PathLike,
  timestamp_column: str | None = None,
  timestamp_type: ts._AnyTimestampType | None = None,
  ) -> Tuple[str, ts._AnyTimestampType, pd.DataFrame]:
@@ -46,7 +47,7 @@

  def upload_tdms_to_dataset(
  dataset: Dataset,
- file: Path | str,
+ file: PathLike,
  timestamp_column: str | None = None,
  timestamp_type: ts._AnyTimestampType | None = None,
  *,
@@ -91,7 +92,7 @@

  def upload_tdms(
  client: NominalClient,
- file: Path | str,
+ file: PathLike,
  name: str | None = None,
  description: str | None = None,
  timestamp_column: str | None = None,
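
The TDMS helpers take PathLike as well. A hedged sketch, assuming client is an authenticated NominalClient obtained elsewhere, the file name is illustrative, and the remaining keyword arguments are left at their defaults:

from nominal.thirdparty.tdms import upload_tdms

upload_tdms(client, "measurements/test_stand.tdms")  # str accepted for the file argument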

{nominal-1.102.0 → nominal-1.104.0}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "nominal"
- version = "1.102.0"
+ version = "1.104.0"
  description = "Automate Nominal workflows in Python"
  authors = [
  { name = "Alexander Reynolds", email = "alex.reynolds@nominal.io" },