nominal 1.101.0-py3-none-any.whl → 1.103.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
nominal/core/dataset.py CHANGED
@@ -14,6 +14,7 @@ from typing_extensions import Self, deprecated
 
 from nominal.core._stream.batch_processor import process_log_batch
 from nominal.core._stream.write_stream import LogStream, WriteStream
+from nominal.core._types import PathLike
 from nominal.core._utils.api_tools import RefreshableMixin
 from nominal.core._utils.multipart import path_upload_name, upload_multipart_file, upload_multipart_io
 from nominal.core.bounds import Bounds
@@ -112,7 +113,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
 
     def add_tabular_data(
         self,
-        path: Path | str,
+        path: PathLike,
         timestamp_column: str,
         timestamp_type: _AnyTimestampType,
         tag_columns: Mapping[str, str] | None = None,
@@ -206,7 +207,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
 
     def add_avro_stream(
         self,
-        path: Path | str,
+        path: PathLike,
     ) -> DatasetFile:
         """Upload an avro stream file with a specific schema, described below.
 
@@ -278,7 +279,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
 
     def add_journal_json(
         self,
-        path: Path | str,
+        path: PathLike,
     ) -> DatasetFile:
         """Add a journald jsonl file to an existing dataset."""
         log_path = Path(path)
@@ -310,7 +311,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
 
     def add_mcap(
         self,
-        path: Path | str,
+        path: PathLike,
         include_topics: Iterable[str] | None = None,
         exclude_topics: Iterable[str] | None = None,
     ) -> DatasetFile:
@@ -384,7 +385,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
 
     def add_ardupilot_dataflash(
         self,
-        path: Path | str,
+        path: PathLike,
         tags: Mapping[str, str] | None = None,
     ) -> DatasetFile:
         """Add a Dataflash file to an existing dataset.
@@ -415,7 +416,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
     def add_containerized(
         self,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         tag: str | None = None,
         *,
         arguments: Mapping[str, str] | None = None,
@@ -425,7 +426,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
     def add_containerized(
         self,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         tag: str | None = None,
         *,
         arguments: Mapping[str, str] | None = None,
@@ -436,7 +437,7 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
    def add_containerized(
         self,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         tag: str | None = None,
         *,
         arguments: Mapping[str, str] | None = None,
@@ -708,7 +709,7 @@ class _DatasetWrapper(abc.ABC):
     def add_tabular_data(
         self,
         data_scope_name: str,
-        path: Path | str,
+        path: PathLike,
         *,
         timestamp_column: str,
         timestamp_type: _AnyTimestampType,
@@ -735,7 +736,7 @@ class _DatasetWrapper(abc.ABC):
     def add_avro_stream(
         self,
         data_scope_name: str,
-        path: Path | str,
+        path: PathLike,
     ) -> DatasetFile:
         """Upload an avro stream file to the dataset selected by `data_scope_name`.
 
@@ -761,7 +762,7 @@ class _DatasetWrapper(abc.ABC):
     def add_journal_json(
         self,
         data_scope_name: str,
-        path: Path | str,
+        path: PathLike,
     ) -> DatasetFile:
         """Add a journald json file to the dataset selected by `data_scope_name`.
 
@@ -787,7 +788,7 @@ class _DatasetWrapper(abc.ABC):
     def add_mcap(
         self,
         data_scope_name: str,
-        path: Path | str,
+        path: PathLike,
         *,
         include_topics: Iterable[str] | None = None,
         exclude_topics: Iterable[str] | None = None,
@@ -815,7 +816,7 @@ class _DatasetWrapper(abc.ABC):
     def add_ardupilot_dataflash(
         self,
         data_scope_name: str,
-        path: Path | str,
+        path: PathLike,
         tags: Mapping[str, str] | None = None,
     ) -> DatasetFile:
         """Add a Dataflash file to the dataset selected by `data_scope_name`.
@@ -835,7 +836,7 @@ class _DatasetWrapper(abc.ABC):
         self,
         data_scope_name: str,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         *,
         tag: str | None = None,
         tags: Mapping[str, str] | None = None,
@@ -845,7 +846,7 @@ class _DatasetWrapper(abc.ABC):
         self,
         data_scope_name: str,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         *,
         tag: str | None = None,
         tags: Mapping[str, str] | None = None,
@@ -856,7 +857,7 @@ class _DatasetWrapper(abc.ABC):
         self,
         data_scope_name: str,
         extractor: str | ContainerizedExtractor,
-        sources: Mapping[str, Path | str],
+        sources: Mapping[str, PathLike],
         *,
         tag: str | None = None,
         tags: Mapping[str, str] | None = None,
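Every `Path | str` parameter on `Dataset` and `_DatasetWrapper` above is widened to the new `PathLike` alias imported from `nominal.core._types`. That module is not included in this diff, so the exact definition is an assumption; the call sites only require something `pathlib.Path` can consume. A minimal sketch of what the widened signatures accept:

```python
# Sketch only: PathLike is assumed to be roughly Union[str, os.PathLike];
# nominal/core/_types.py is not part of this diff, so this is illustrative.
import os
import pathlib
from typing import Union

PathLike = Union[str, os.PathLike]


def takes_pathlike(path: PathLike) -> pathlib.Path:
    # Matches the coercion pattern used throughout the diff, e.g. `log_path = Path(path)`.
    return pathlib.Path(path)


takes_pathlike("telemetry.csv")                       # plain str, as before
takes_pathlike(pathlib.Path("telemetry.csv"))         # pathlib.Path, as before
takes_pathlike(pathlib.PurePosixPath("logs/t.csv"))   # any os.PathLike now also fits
```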
@@ -13,6 +13,7 @@ from nominal_api import api, ingest_api, scout_catalog
 from typing_extensions import Self
 
 from nominal.core._clientsbunch import HasScoutParams
+from nominal.core._types import PathLike
 from nominal.core._utils.api_tools import RefreshableMixin
 from nominal.core._utils.multipart import DEFAULT_CHUNK_SIZE
 from nominal.core._utils.multipart_downloader import (
@@ -128,7 +129,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
 
     def download(
         self,
-        output_directory: pathlib.Path,
+        output_directory: PathLike,
         *,
         part_size: int = DEFAULT_CHUNK_SIZE,
         num_retries: int = 3,
@@ -148,6 +149,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
             FileExistsError: File already exists at destination
             RuntimeError: Error downloading file
         """
+        output_directory = pathlib.Path(output_directory)
         if output_directory.exists() and not output_directory.is_dir():
             raise NotADirectoryError(f"Output directory is not a directory: {output_directory}")
 
@@ -160,7 +162,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
 
     def download_original_files(
         self,
-        output_directory: pathlib.Path,
+        output_directory: PathLike,
         *,
         part_size: int = DEFAULT_CHUNK_SIZE,
         num_retries: int = 3,
@@ -184,6 +186,7 @@ class DatasetFile(RefreshableMixin[scout_catalog.DatasetFile]):
         NOTE: any file that fails to download will result in an error log and will not be returned
         as an output path
         """
+        output_directory = pathlib.Path(output_directory)
         if output_directory.exists() and not output_directory.is_dir():
             raise NotADirectoryError(f"Output directory is not a directory: {output_directory}")
 
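The hunks above (for the `DatasetFile` class; the file header is missing from this diff) pair the annotation change with a new `pathlib.Path(output_directory)` coercion ahead of validation, since a plain `str` would otherwise fail at `output_directory.exists()` with `AttributeError`. The normalize-then-validate pattern as a standalone sketch:

```python
import pathlib


def normalized_output_dir(output_directory) -> pathlib.Path:
    """Standalone sketch of the pattern added to download/download_original_files."""
    # Coerce first, so str and os.PathLike inputs gain Path methods...
    output_directory = pathlib.Path(output_directory)
    # ...then validate exactly as the methods do after the inserted line.
    if output_directory.exists() and not output_directory.is_dir():
        raise NotADirectoryError(f"Output directory is not a directory: {output_directory}")
    return output_directory


print(normalized_output_dir("downloads"))  # a str now behaves like Path("downloads")
```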
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import logging
-import pathlib
 from dataclasses import dataclass, field
 from datetime import timedelta
 from typing import Iterable, Literal, Mapping, Protocol, Sequence, overload
@@ -27,6 +26,7 @@ from nominal._utils import batched
 from nominal.core._clientsbunch import HasScoutParams, ProtoWriteService
 from nominal.core._stream.batch_processor import process_batch_legacy
 from nominal.core._stream.write_stream import DataStream, WriteStream
+from nominal.core._types import PathLike
 from nominal.core._utils.api_tools import HasRid
 from nominal.core.channel import Channel, ChannelDataType
 from nominal.core.unit import UnitMapping, _build_unit_update, _error_on_invalid_units
@@ -125,7 +125,7 @@ class DataSource(HasRid):
         batch_size: int = 50_000,
         max_wait: timedelta = timedelta(seconds=1),
         data_format: Literal["rust_experimental"] | None = None,
-        file_fallback: pathlib.Path | None = None,
+        file_fallback: PathLike | None = None,
         log_level: str | None = None,
         num_workers: int | None = None,
     ) -> DataStream: ...
@@ -134,7 +134,7 @@ class DataSource(HasRid):
         batch_size: int = 50_000,
         max_wait: timedelta = timedelta(seconds=1),
         data_format: Literal["json", "protobuf", "experimental", "rust_experimental"] | None = None,
-        file_fallback: pathlib.Path | None = None,
+        file_fallback: PathLike | None = None,
         log_level: str | None = None,
         num_workers: int | None = None,
     ) -> DataStream:
@@ -327,7 +327,7 @@ def _get_write_stream(
     batch_size: int,
     max_wait: timedelta,
     data_format: Literal["json", "protobuf", "experimental", "rust_experimental"] | None,
-    file_fallback: pathlib.Path | None,
+    file_fallback: PathLike | None,
     log_level: str | None,
     num_workers: int | None,
     write_rid: str,
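These `DataSource` hunks apply the same widening to the streaming `file_fallback` option and drop the now-unused `import pathlib`. A hedged usage sketch; only the parameter lists are confirmed by the hunks, so `source`, the `get_write_stream` entry point, and the `enqueue` signature are assumptions:

```python
from datetime import timedelta

# Illustrative only: `source` stands in for a DataSource from an authenticated client.
with source.get_write_stream(            # assumed public entry point for these overloads
    batch_size=50_000,
    max_wait=timedelta(seconds=1),
    file_fallback="unsent_batches.log",  # str now accepted; previously pathlib.Path only
) as stream:
    stream.enqueue("engine.temperature", "2024-01-01T00:00:00Z", 98.6)  # assumed signature
```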
nominal/core/event.py CHANGED
@@ -3,16 +3,18 @@ from __future__ import annotations
 import warnings
 from dataclasses import dataclass, field
 from datetime import datetime, timedelta
-from enum import Enum
 from typing import Iterable, Mapping, Protocol, Sequence
 
-from nominal_api import api, event
+from nominal_api import event
 from typing_extensions import Self
 
 from nominal.core import asset as core_asset
 from nominal.core._clientsbunch import HasScoutParams
+from nominal.core._event_types import EventType as EventType  # noqa: PLC0414
+from nominal.core._event_types import SearchEventOriginType as SearchEventOriginType  # noqa: PLC0414
 from nominal.core._utils.api_tools import HasRid, RefreshableMixin, rid_from_instance_or_string
 from nominal.core._utils.pagination_tools import search_events_paginated
+from nominal.core._utils.query_tools import _create_search_events_query
 from nominal.ts import IntegralNanosecondsDuration, IntegralNanosecondsUTC, _SecondsNanos, _to_api_duration
 
 
@@ -124,39 +126,6 @@ class Event(HasRid, RefreshableMixin[event.Event]):
         )
 
 
-class EventType(Enum):
-    INFO = "INFO"
-    FLAG = "FLAG"
-    ERROR = "ERROR"
-    SUCCESS = "SUCCESS"
-    UNKNOWN = "UNKNOWN"
-
-    @classmethod
-    def from_api_event_type(cls, event: event.EventType) -> EventType:
-        if event.name == "INFO":
-            return cls.INFO
-        elif event.name == "FLAG":
-            return cls.FLAG
-        elif event.name == "ERROR":
-            return cls.ERROR
-        elif event.name == "SUCCESS":
-            return cls.SUCCESS
-        else:
-            return cls.UNKNOWN
-
-    def _to_api_event_type(self) -> event.EventType:
-        if self.name == "INFO":
-            return event.EventType.INFO
-        elif self.name == "FLAG":
-            return event.EventType.FLAG
-        elif self.name == "ERROR":
-            return event.EventType.ERROR
-        elif self.name == "SUCCESS":
-            return event.EventType.SUCCESS
-        else:
-            return event.EventType.UNKNOWN
-
-
 def _create_event(
     clients: Event._Clients,
     *,
@@ -203,6 +172,7 @@ def _search_events(
     data_review_rid: str | None = None,
     assignee_rid: str | None = None,
     event_type: EventType | None = None,
+    origin_types: Iterable[SearchEventOriginType] | None = None,
     workspace_rid: str | None = None,
 ) -> Sequence[Event]:
     query = _create_search_events_query(
@@ -216,53 +186,10 @@
         workbook_rid=workbook_rid,
         data_review_rid=data_review_rid,
         assignee_rid=assignee_rid,
-        event_type=event_type,
+        event_type=event_type._to_api_event_type() if event_type else None,
+        origin_types=[origin_type._to_api_search_event_origin_type() for origin_type in origin_types]
+        if origin_types
+        else None,
         workspace_rid=workspace_rid,
     )
     return list(_iter_search_events(clients, query))
-
-
-def _create_search_events_query(  # noqa: PLR0912
-    search_text: str | None = None,
-    after: str | datetime | IntegralNanosecondsUTC | None = None,
-    before: str | datetime | IntegralNanosecondsUTC | None = None,
-    asset_rids: Iterable[str] | None = None,
-    labels: Iterable[str] | None = None,
-    properties: Mapping[str, str] | None = None,
-    created_by_rid: str | None = None,
-    workbook_rid: str | None = None,
-    data_review_rid: str | None = None,
-    assignee_rid: str | None = None,
-    event_type: EventType | None = None,
-    workspace_rid: str | None = None,
-) -> event.SearchQuery:
-    queries = []
-    if search_text is not None:
-        queries.append(event.SearchQuery(search_text=search_text))
-    if after is not None:
-        queries.append(event.SearchQuery(after=_SecondsNanos.from_flexible(after).to_api()))
-    if before is not None:
-        queries.append(event.SearchQuery(before=_SecondsNanos.from_flexible(before).to_api()))
-    if asset_rids:
-        for asset in asset_rids:
-            queries.append(event.SearchQuery(asset=asset))
-    if labels:
-        for label in labels:
-            queries.append(event.SearchQuery(label=label))
-    if properties:
-        for name, value in properties.items():
-            queries.append(event.SearchQuery(property=api.Property(name=name, value=value)))
-    if created_by_rid:
-        queries.append(event.SearchQuery(created_by=created_by_rid))
-    if workbook_rid is not None:
-        queries.append(event.SearchQuery(workbook=workbook_rid))
-    if data_review_rid is not None:
-        queries.append(event.SearchQuery(data_review=data_review_rid))
-    if assignee_rid is not None:
-        queries.append(event.SearchQuery(assignee=assignee_rid))
-    if event_type is not None:
-        queries.append(event.SearchQuery(event_type=event_type._to_api_event_type()))
-    if workspace_rid is not None:
-        queries.append(event.SearchQuery(workspace=workspace_rid))
-
-    return event.SearchQuery(and_=queries)
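`EventType` and `SearchEventOriginType` now live in `nominal.core._event_types` and are re-exported with the `X as X` aliasing form (the explicit re-export idiom that the `PLC0414` suppression permits), so existing `from nominal.core.event import EventType` imports keep working. The query builder moves to `nominal.core._utils.query_tools` and now receives API-level values, so `_search_events` converts at the call site. A runnable sketch of that conversion pattern, using stand-in enums since `_event_types` is not shown in this diff:

```python
from enum import Enum


class ApiEventType(Enum):
    """Stand-in for the conjure-generated event.EventType (not in this diff)."""
    INFO = "INFO"
    UNKNOWN = "UNKNOWN"


class EventType(Enum):
    """Stand-in mirroring the enum that moved to nominal.core._event_types."""
    INFO = "INFO"
    UNKNOWN = "UNKNOWN"

    def _to_api_event_type(self) -> ApiEventType:
        # Same name-based mapping the removed class performed member by member.
        return ApiEventType.__members__.get(self.name, ApiEventType.UNKNOWN)


# The new call-site pattern from the hunk: convert only when a filter was given.
event_type = EventType.INFO
api_value = event_type._to_api_event_type() if event_type else None
assert api_value is ApiEventType.INFO
```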
nominal/core/filetype.py CHANGED
@@ -5,6 +5,8 @@ import mimetypes
 from pathlib import Path
 from typing import NamedTuple
 
+from nominal.core._types import PathLike
+
 logger = logging.getLogger(__name__)
 
 
@@ -13,7 +15,7 @@ class FileType(NamedTuple):
     mimetype: str
 
     @classmethod
-    def from_path(cls, path: Path | str, default_mimetype: str = "application/octect-stream") -> FileType:
+    def from_path(cls, path: PathLike, default_mimetype: str = "application/octect-stream") -> FileType:
         path = Path(path)
 
         # Note: not using path.suffix because this fails for files with multiple suffixes,
@@ -70,7 +72,7 @@ class FileType(NamedTuple):
         return self in FileTypes._VIDEO_TYPES
 
     @classmethod
-    def from_path_dataset(cls, path: Path | str) -> FileType:
+    def from_path_dataset(cls, path: PathLike) -> FileType:
         file_type = cls.from_path(path)
         if not file_type.is_parquet_file() and not file_type.is_csv():
             allowed_extensions = (*FileTypes._PARQUET_FILE_TYPES, *FileTypes._CSV_TYPES)
@@ -79,7 +81,7 @@ class FileType(NamedTuple):
         return file_type
 
     @classmethod
-    def from_tabular(cls, path: Path | str) -> FileType:
+    def from_tabular(cls, path: PathLike) -> FileType:
         file_type = cls.from_path(path)
         if not file_type.is_csv() and not file_type.is_parquet():
             allowed_extensions = (
@@ -92,7 +94,7 @@ class FileType(NamedTuple):
         return file_type
 
     @classmethod
-    def from_path_journal_json(cls, path: Path | str) -> FileType:
+    def from_path_journal_json(cls, path: PathLike) -> FileType:
         file_type = cls.from_path(path)
         if not file_type.is_journal():
             raise ValueError(
@@ -102,7 +104,7 @@ class FileType(NamedTuple):
         return file_type
 
     @classmethod
-    def from_video(cls, path: Path | str) -> FileType:
+    def from_video(cls, path: PathLike) -> FileType:
         file_type = cls.from_path(path)
         if not file_type.is_video():
             raise ValueError(f"video path '{path}' must end in one of {[f.extension for f in FileTypes._VIDEO_TYPES]}")
nominal/core/run.py CHANGED
@@ -13,6 +13,7 @@ from typing_extensions import Self
 
 from nominal.core import asset as core_asset
 from nominal.core._clientsbunch import HasScoutParams
+from nominal.core._event_types import EventType
 from nominal.core._utils.api_tools import (
     HasRid,
     Link,
@@ -25,7 +26,7 @@ from nominal.core.asset import _filter_scopes
 from nominal.core.attachment import Attachment, _iter_get_attachments
 from nominal.core.connection import Connection, _get_connections
 from nominal.core.dataset import Dataset, _DatasetWrapper, _get_datasets
-from nominal.core.event import Event, EventType, _create_event
+from nominal.core.event import Event, _create_event
 from nominal.core.video import Video, _get_video
 from nominal.ts import IntegralNanosecondsDuration, IntegralNanosecondsUTC, _SecondsNanos, _to_api_duration
 
@@ -372,11 +373,13 @@ class Run(HasRid, RefreshableMixin[scout_run_api.Run], _DatasetWrapper):
     def archive(self) -> None:
         """Archive this run.
         Archived runs are not deleted, but are hidden from the UI.
-
-        NOTE: currently, it is not possible (yet) to unarchive a run once archived.
         """
         self._clients.run.archive_run(self._clients.auth_header, self.rid)
 
+    def unarchive(self) -> None:
+        """Unarchive this run, allowing it to appear on the UI."""
+        self._clients.run.unarchive_run(self._clients.auth_header, self.rid)
+
     @classmethod
     def _from_conjure(cls, clients: _Clients, run: scout_run_api.Run) -> Self:
         return cls(
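`Run.archive` loses its one-way caveat because `unarchive` now exists. An illustrative round trip; `client`, the `get_run` lookup, and the RID are placeholders for an authenticated session:

```python
# Illustrative only: `client` stands in for an authenticated NominalClient;
# archive/unarchive are the methods shown in the hunk above.
run = client.get_run("ri.scout.<...>.run.<...>")  # hypothetical RID
run.archive()    # hidden from the UI, not deleted
run.unarchive()  # new in this release: restores UI visibility
```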
nominal/core/video.py CHANGED
@@ -14,6 +14,7 @@ from nominal_api import api, ingest_api, scout_video, scout_video_api, upload_api
 from typing_extensions import Self
 
 from nominal.core._clientsbunch import HasScoutParams
+from nominal.core._types import PathLike
 from nominal.core._utils.api_tools import HasRid, RefreshableMixin
 from nominal.core._utils.multipart import path_upload_name, upload_multipart_io
 from nominal.core.exceptions import NominalIngestError, NominalIngestFailed
@@ -118,7 +119,7 @@ class Video(HasRid, RefreshableMixin[scout_video_api.Video]):
 
     def add_file(
         self,
-        path: pathlib.Path | str,
+        path: PathLike,
         start: datetime | IntegralNanosecondsUTC | None = None,
         frame_timestamps: Sequence[IntegralNanosecondsUTC] | None = None,
         description: str | None = None,
@@ -212,7 +213,7 @@ class Video(HasRid, RefreshableMixin[scout_video_api.Video]):
 
     def add_mcap(
         self,
-        path: pathlib.Path,
+        path: PathLike,
         topic: str,
         description: str | None = None,
     ) -> VideoFile: