nominal 1.109.0__py3-none-any.whl → 1.111.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +31 -0
- nominal/core/_checklist_types.py +48 -0
- nominal/core/_clientsbunch.py +0 -3
- nominal/core/_utils/api_tools.py +16 -2
- nominal/core/_video_types.py +16 -0
- nominal/core/asset.py +34 -18
- nominal/core/bounds.py +8 -1
- nominal/core/channel.py +0 -15
- nominal/core/checklist.py +11 -25
- nominal/core/client.py +25 -29
- nominal/core/data_review.py +32 -11
- nominal/core/dataset.py +41 -2
- nominal/core/dataset_file.py +6 -0
- nominal/core/datasource.py +0 -3
- nominal/core/run.py +25 -11
- nominal/core/streaming_checklist.py +25 -0
- nominal/core/video.py +71 -13
- nominal/core/video_file.py +62 -2
- nominal/experimental/migration/migration_utils.py +343 -42
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/METADATA +2 -2
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/RECORD +24 -21
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/WHEEL +0 -0
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/entry_points.txt +0 -0
- {nominal-1.109.0.dist-info → nominal-1.111.0.dist-info}/licenses/LICENSE +0 -0
CHANGELOG.md
CHANGED
@@ -1,5 +1,36 @@
 # Changelog

+## [1.111.0](https://github.com/nominal-io/nominal-client/compare/v1.110.0...v1.111.0) (2026-01-29)
+
+
+### Features
+
+* add RID mapping logging to migration utils ([#593](https://github.com/nominal-io/nominal-client/issues/593)) ([bf19a8b](https://github.com/nominal-io/nominal-client/commit/bf19a8b99f176d0d25586c9f134278a684cedcd2))
+* copy over bounds in clone dataset method ([#597](https://github.com/nominal-io/nominal-client/issues/597)) ([49f0cc1](https://github.com/nominal-io/nominal-client/commit/49f0cc119895768f48fd94a70719ad516bf51a94))
+
+
+### Bug Fixes
+
+* remove logical series service references ([#591](https://github.com/nominal-io/nominal-client/issues/591)) ([ec3ce3b](https://github.com/nominal-io/nominal-client/commit/ec3ce3bc6ddfad7a3e07cfc51f0681bbb719f952))
+
+
+### Documentation
+
+* update avro stream docstring with new schema types ([#594](https://github.com/nominal-io/nominal-client/issues/594)) ([690e5a3](https://github.com/nominal-io/nominal-client/commit/690e5a34dc782601df13d72034c452844fc644a4))
+
+## [1.110.0](https://github.com/nominal-io/nominal-client/compare/v1.109.0...v1.110.0) (2026-01-23)
+
+
+### Features
+
+* add data review/checklist methods to assets and runs ([#567](https://github.com/nominal-io/nominal-client/issues/567)) ([4b080d9](https://github.com/nominal-io/nominal-client/commit/4b080d96700fc51555b69a093d592cb03b92e073))
+* add video clone capability in experimental and refactor video methods ([#576](https://github.com/nominal-io/nominal-client/issues/576)) ([f58a89c](https://github.com/nominal-io/nominal-client/commit/f58a89c1fe2220677f439fdf306f0c743e441548))
+
+
+### Bug Fixes
+
+* properly handle dataset files in the new deletion states ([#590](https://github.com/nominal-io/nominal-client/issues/590)) ([e71bcc9](https://github.com/nominal-io/nominal-client/commit/e71bcc9f8fd752257dc2b33789380f0ffb7c5410))
+
 ## [1.109.0](https://github.com/nominal-io/nominal-client/compare/v1.108.0...v1.109.0) (2026-01-23)

nominal/core/_checklist_types.py
ADDED
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from enum import IntEnum
+
+from nominal_api import scout_api
+
+
+class Priority(IntEnum):
+    P0 = 0
+    P1 = 1
+    P2 = 2
+    P3 = 3
+    P4 = 4
+
+    @classmethod
+    def _from_conjure(cls, priority: scout_api.Priority) -> Priority:
+        match priority.name:
+            case "P0":
+                return cls.P0
+            case "P1":
+                return cls.P1
+            case "P2":
+                return cls.P2
+            case "P3":
+                return cls.P3
+            case "P4":
+                return cls.P4
+            case _:
+                raise ValueError(f"unknown priority '{priority}', expected one of {list(cls)}")
+
+    def _to_conjure(self) -> scout_api.Priority:
+        match self:
+            case Priority.P0:
+                return scout_api.Priority.P0
+            case Priority.P1:
+                return scout_api.Priority.P1
+            case Priority.P2:
+                return scout_api.Priority.P2
+            case Priority.P3:
+                return scout_api.Priority.P3
+            case Priority.P4:
+                return scout_api.Priority.P4
+            case _:
+                raise ValueError(f"unknown priority '{self}', expected one of {list(Priority)}")
+
+
+def _conjure_priority_to_priority(priority: scout_api.Priority) -> Priority:
+    return Priority._from_conjure(priority)
nominal/core/_clientsbunch.py
CHANGED
@@ -27,7 +27,6 @@ from nominal_api import (
     storage_datasource_api,
     storage_writer_api,
     timeseries_channelmetadata,
-    timeseries_logicalseries,
     timeseries_metadata,
     upload_api,
 )
@@ -124,7 +123,6 @@ class ClientsBunch:
     dataexport: scout_dataexport_api.DataExportService
     datasource: scout_datasource.DataSourceService
     ingest: ingest_api.IngestService
-    logical_series: timeseries_logicalseries.LogicalSeriesService
     run: scout.RunService
     units: scout.UnitsService
     upload: upload_api.UploadService
@@ -165,7 +163,6 @@ class ClientsBunch:
             dataexport=client_factory(scout_dataexport_api.DataExportService),
             datasource=client_factory(scout_datasource.DataSourceService),
             ingest=client_factory(ingest_api.IngestService),
-            logical_series=client_factory(timeseries_logicalseries.LogicalSeriesService),
             run=client_factory(scout.RunService),
             units=client_factory(scout.UnitsService),
             upload=client_factory(upload_api.UploadService),
nominal/core/_utils/api_tools.py
CHANGED
@@ -5,13 +5,15 @@ import importlib.metadata
 import logging
 import platform
 import sys
-from typing import Any, Generic, Mapping, Protocol, Sequence, TypeAlias, TypedDict, TypeVar, runtime_checkable
+from typing import Any, Generic, Literal, Mapping, Protocol, Sequence, TypeAlias, TypedDict, TypeVar, runtime_checkable

-from nominal_api import scout_compute_api, scout_run_api
+from nominal_api import scout_asset_api, scout_compute_api, scout_run_api
 from typing_extensions import NotRequired, Self

 from nominal._utils.dataclass_tools import update_dataclass

+ScopeTypeSpecifier: TypeAlias = Literal["connection", "dataset", "video"]
+
 logger = logging.getLogger(__name__)

 T = TypeVar("T")
@@ -94,3 +96,15 @@ def create_api_tags(tags: Mapping[str, str] | None = None) -> dict[str, scout_co
         return {}

     return {key: scout_compute_api.StringConstant(literal=value) for key, value in tags.items()}
+
+
+def filter_scopes(
+    scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier
+) -> Sequence[scout_asset_api.DataScope]:
+    return [scope for scope in scopes if scope.data_source.type.lower() == scope_type]
+
+
+def filter_scope_rids(scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier) -> Mapping[str, str]:
+    return {
+        scope.data_scope_name: getattr(scope.data_source, scope_type) for scope in filter_scopes(scopes, scope_type)
+    }
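These helpers generalize the private `_filter_scopes`/`_filter_scope_rids` functions previously defined in `asset.py` (removed in that file's hunks below) so other modules can reuse them. A sketch of the intended call pattern; `asset_api` is a hypothetical, already-fetched `scout_asset_api.Asset`:

```python
from nominal.core._utils.api_tools import filter_scope_rids, filter_scopes

# `asset_api` is assumed to be a scout_asset_api.Asset retrieved elsewhere.
dataset_scopes = filter_scopes(asset_api.data_scopes, "dataset")

# Maps each data scope name to the dataset RID on its data source; the
# scope_type string doubles as the attribute name looked up on scope.data_source.
dataset_rids = filter_scope_rids(asset_api.data_scopes, "dataset")
for scope_name, dataset_rid in dataset_rids.items():
    print(f"{scope_name}: {dataset_rid}")
```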
nominal/core/_video_types.py
ADDED
@@ -0,0 +1,16 @@
+from dataclasses import dataclass
+
+from nominal.ts import IntegralNanosecondsUTC
+
+
+@dataclass(init=True, repr=False, eq=False, order=False, unsafe_hash=False)
+class McapVideoDetails:
+    mcap_channel_locator_topic: str
+
+
+@dataclass(init=True, repr=False, eq=False, order=False, unsafe_hash=False)
+class TimestampOptions:
+    starting_timestamp: IntegralNanosecondsUTC
+    ending_timestamp: IntegralNanosecondsUTC
+    scaling_factor: float
+    true_framerate: float
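Both dataclasses are plain value containers with generated `__init__` methods, so they are built with keyword arguments. A small construction sketch with placeholder values (the topic name and timestamps below are illustrative only):

```python
from nominal.core._video_types import McapVideoDetails, TimestampOptions

# Locate the video stream inside an MCAP file by its channel topic (placeholder topic).
details = McapVideoDetails(mcap_channel_locator_topic="/camera/front/h264")

# Absolute timestamps are integral nanoseconds since the UTC epoch (placeholder values).
options = TimestampOptions(
    starting_timestamp=1_700_000_000_000_000_000,
    ending_timestamp=1_700_000_060_000_000_000,
    scaling_factor=1.0,
    true_framerate=30.0,
)
```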
nominal/core/asset.py
CHANGED
@@ -4,7 +4,7 @@ import datetime
 import logging
 from dataclasses import dataclass, field
 from types import MappingProxyType
-from typing import Iterable,
+from typing import Iterable, Mapping, Protocol, Sequence, TypeAlias

 from nominal_api import (
     event,
@@ -15,6 +15,7 @@ from nominal_api import (
 )
 from typing_extensions import Self

+from nominal.core import data_review, streaming_checklist
 from nominal.core._clientsbunch import HasScoutParams
 from nominal.core._event_types import EventType, SearchEventOriginType
 from nominal.core._utils.api_tools import (
@@ -22,7 +23,10 @@ from nominal.core._utils.api_tools import (
     Link,
     LinkDict,
     RefreshableMixin,
+    ScopeTypeSpecifier,
     create_links,
+    filter_scope_rids,
+    filter_scopes,
     rid_from_instance_or_string,
 )
 from nominal.core._utils.pagination_tools import search_runs_by_asset_paginated
@@ -35,25 +39,10 @@ from nominal.core.video import Video, _create_video, _get_video
 from nominal.ts import IntegralNanosecondsDuration, IntegralNanosecondsUTC, _SecondsNanos

 ScopeType: TypeAlias = Connection | Dataset | Video
-ScopeTypeSpecifier: TypeAlias = Literal["connection", "dataset", "video"]

 logger = logging.getLogger(__name__)


-def _filter_scopes(
-    scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier
-) -> Sequence[scout_asset_api.DataScope]:
-    return [scope for scope in scopes if scope.data_source.type.lower() == scope_type]
-
-
-def _filter_scope_rids(
-    scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier
-) -> Mapping[str, str]:
-    return {
-        scope.data_scope_name: getattr(scope.data_source, scope_type) for scope in _filter_scopes(scopes, scope_type)
-    }
-
-
 @dataclass(frozen=True)
 class Asset(_DatasetWrapper, HasRid, RefreshableMixin[scout_asset_api.Asset]):
     rid: str
@@ -70,6 +59,7 @@ class Asset(_DatasetWrapper, HasRid, RefreshableMixin[scout_asset_api.Asset]):
         Video._Clients,
         Attachment._Clients,
         Event._Clients,
+        data_review.DataReview._Clients,
         HasScoutParams,
         Protocol,
     ):
@@ -94,11 +84,11 @@ class Asset(_DatasetWrapper, HasRid, RefreshableMixin[scout_asset_api.Asset]):
         return response[self.rid]

     def _list_dataset_scopes(self) -> Sequence[scout_asset_api.DataScope]:
-        return
+        return filter_scopes(self._get_latest_api().data_scopes, "dataset")

     def _scope_rids(self, scope_type: ScopeTypeSpecifier) -> Mapping[str, str]:
         asset = self._get_latest_api()
-        return
+        return filter_scope_rids(asset.data_scopes, scope_type)

     def update(
         self,
@@ -540,6 +530,32 @@ class Asset(_DatasetWrapper, HasRid, RefreshableMixin[scout_asset_api.Asset]):
             origin_types=origin_types,
         )

+    def search_data_reviews(
+        self,
+        runs: Sequence[Run | str] | None = None,
+    ) -> Sequence[data_review.DataReview]:
+        """Search for data reviews associated with this Asset. See nominal.core.client.search_data_reviews
+        for details.
+        """
+        return list(
+            data_review._iter_search_data_reviews(
+                self._clients,
+                assets=[self.rid],
+                runs=[rid_from_instance_or_string(run) for run in (runs or [])],
+            )
+        )
+
+    def list_streaming_checklists(self) -> Sequence[str]:
+        """List all Streaming Checklists associated with this Asset. See
+        nominal.core.client.list_streaming_checklists for details.
+        """
+        return list(
+            streaming_checklist._iter_list_streaming_checklists(
+                self._clients,
+                asset_rid=self.rid,
+            )
+        )
+
     def remove_attachments(self, attachments: Iterable[Attachment] | Iterable[str]) -> None:
         """Remove attachments from this asset.
         Does not remove the attachments from Nominal.
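The two new `Asset` methods are thin wrappers over the same iterators the client uses. A hypothetical usage sketch, assuming `asset` is an already-fetched `nominal.core.Asset` and `run` is a `Run` object or run RID string:

```python
# All data reviews associated with this asset.
reviews = asset.search_data_reviews()

# Only data reviews for a specific run (Run objects and run RID strings both work).
run_reviews = asset.search_data_reviews(runs=[run])

# RIDs of all streaming checklists attached to this asset.
checklist_rids = asset.list_streaming_checklists()
```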
nominal/core/bounds.py
CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

 from dataclasses import dataclass

-from nominal_api import scout_catalog
+from nominal_api import datasource, scout_catalog
 from typing_extensions import Self

 from nominal.ts import (
@@ -22,3 +22,10 @@ class Bounds:
             start=_SecondsNanos.from_api(bounds.start).to_nanoseconds(),
             end=_SecondsNanos.from_api(bounds.end).to_nanoseconds(),
         )
+
+    def _to_conjure(self) -> scout_catalog.Bounds:
+        return scout_catalog.Bounds(
+            type=datasource.TimestampType.ABSOLUTE,
+            start=_SecondsNanos.from_nanoseconds(self.start).to_api(),
+            end=_SecondsNanos.from_nanoseconds(self.end).to_api(),
+        )
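`_to_conjure` is the inverse of the existing API-to-`Bounds` conversion: the nanosecond fields are serialized back through `_SecondsNanos` and tagged with an absolute timestamp type. A minimal sketch, assuming `Bounds` is a plain dataclass with `start` and `end` nanosecond fields as the hunk above implies:

```python
from nominal.core.bounds import Bounds

# Placeholder values: integral nanoseconds since the UTC epoch.
bounds = Bounds(start=1_700_000_000_000_000_000, end=1_700_000_060_000_000_000)

# A scout_catalog.Bounds with datasource.TimestampType.ABSOLUTE and API-encoded start/end,
# suitable for passing back to catalog endpoints (e.g. when cloning a dataset's bounds).
api_bounds = bounds._to_conjure()
```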
nominal/core/channel.py
CHANGED
@@ -15,7 +15,6 @@ from nominal_api import (
     storage_series_api,
     timeseries_channelmetadata,
     timeseries_channelmetadata_api,
-    timeseries_logicalseries_api,
 )
 from typing_extensions import Self

@@ -298,20 +297,6 @@ class Channel(RefreshableMixin[timeseries_channelmetadata_api.ChannelMetadata]):
             _clients=clients,
         )

-    @classmethod
-    def _from_conjure_logicalseries_api(
-        cls, clients: _Clients, series: timeseries_logicalseries_api.LogicalSeries
-    ) -> Self:
-        channel_data_type = ChannelDataType._from_conjure(series.series_data_type) if series.series_data_type else None
-        return cls(
-            name=series.channel,
-            data_source=series.data_source_rid,
-            unit=series.unit,
-            description=series.description,
-            data_type=channel_data_type,
-            _clients=clients,
-        )
-
     @classmethod
     def _from_channel_metadata_api(
         cls, clients: _Clients, channel: timeseries_channelmetadata_api.ChannelMetadata
nominal/core/checklist.py
CHANGED
@@ -2,10 +2,11 @@ from __future__ import annotations

 from dataclasses import dataclass, field
 from datetime import timedelta
-from typing import
+from typing import Mapping, Protocol, Sequence

 from nominal_api import (
-
+    event,
+    scout,
     scout_checklistexecution_api,
     scout_checks_api,
     scout_datareview_api,
@@ -13,11 +14,11 @@ from nominal_api import (
 )
 from typing_extensions import Self

+from nominal.core import run as core_run
 from nominal.core._clientsbunch import HasScoutParams
 from nominal.core._utils.api_tools import HasRid, rid_from_instance_or_string
 from nominal.core.asset import Asset
 from nominal.core.data_review import DataReview
-from nominal.core.run import Run
 from nominal.ts import _to_api_duration


@@ -30,13 +31,17 @@ class Checklist(HasRid):
     labels: Sequence[str]
     _clients: _Clients = field(repr=False)

-    class _Clients(
+    class _Clients(HasScoutParams, Protocol):
         @property
         def checklist(self) -> scout_checks_api.ChecklistService: ...
         @property
         def checklist_execution(self) -> scout_checklistexecution_api.ChecklistExecutionService: ...
         @property
         def datareview(self) -> scout_datareview_api.DataReviewService: ...
+        @property
+        def event(self) -> event.EventService: ...
+        @property
+        def run(self) -> scout.RunService: ...

     @classmethod
     def _from_conjure(cls, clients: _Clients, checklist: scout_checks_api.VersionedChecklist) -> Self:
@@ -53,7 +58,7 @@ class Checklist(HasRid):
             _clients=clients,
         )

-    def execute(self, run: Run | str, commit: str | None = None) -> DataReview:
+    def execute(self, run: core_run.Run | str, commit: str | None = None) -> DataReview:
         """Execute a checklist against a run.

         Args:
@@ -152,26 +157,7 @@ class Checklist(HasRid):
         """Returns a link to the page for this checklist in the Nominal app"""
         return f"{self._clients.app_base_url}/checklists/{self.rid}"

-    def preview_for_run_url(self, run: Run | str) -> str:
+    def preview_for_run_url(self, run: core_run.Run | str) -> str:
         """Returns a link to the page for previewing this checklist on a given run in the Nominal app"""
         run_rid = rid_from_instance_or_string(run)
         return f"{self.nominal_url}?previewRunRid={run_rid}"
-
-
-Priority = Literal[0, 1, 2, 3, 4]
-
-
-_priority_to_conjure_map: dict[Priority, scout_api.Priority] = {
-    0: scout_api.Priority.P0,
-    1: scout_api.Priority.P1,
-    2: scout_api.Priority.P2,
-    3: scout_api.Priority.P3,
-    4: scout_api.Priority.P4,
-}
-
-
-def _conjure_priority_to_priority(priority: scout_api.Priority) -> Priority:
-    inverted_map = {v: k for k, v in _priority_to_conjure_map.items()}
-    if priority in inverted_map:
-        return inverted_map[priority]
-    raise ValueError(f"unknown priority '{priority}', expected one of {_priority_to_conjure_map.values()}")
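Switching the `Run` annotations to the lazily-imported `core_run.Run` leaves the public behavior of `execute` and `preview_for_run_url` unchanged. A hypothetical usage sketch, assuming `checklist` is a `nominal.core.Checklist` and `run` is a `Run` object or run RID string:

```python
# Execute the checklist against a run; omitting `commit` uses the latest commit.
review = checklist.execute(run)

# Deep link for previewing this checklist on that run in the Nominal app.
print(checklist.preview_for_run_url(run))
```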
nominal/core/client.py
CHANGED
@@ -48,11 +48,8 @@ from nominal.core._utils.multipart import (
     upload_multipart_io,
 )
 from nominal.core._utils.pagination_tools import (
-    list_streaming_checklists_for_asset_paginated,
-    list_streaming_checklists_paginated,
     search_assets_paginated,
     search_checklists_paginated,
-    search_data_reviews_paginated,
     search_datasets_paginated,
     search_runs_by_asset_paginated,
     search_runs_paginated,
@@ -84,7 +81,7 @@ from nominal.core.containerized_extractors import (
     FileExtractionInput,
     FileOutputFormat,
 )
-from nominal.core.data_review import DataReview, DataReviewBuilder
+from nominal.core.data_review import DataReview, DataReviewBuilder, _iter_search_data_reviews
 from nominal.core.dataset import (
     Dataset,
     _create_dataset,
@@ -97,6 +94,7 @@ from nominal.core.exceptions import NominalConfigError, NominalError, NominalMet
 from nominal.core.filetype import FileType, FileTypes
 from nominal.core.run import Run, _create_run
 from nominal.core.secret import Secret
+from nominal.core.streaming_checklist import _iter_list_streaming_checklists
 from nominal.core.unit import Unit, _available_units
 from nominal.core.user import User
 from nominal.core.video import Video, _create_video
@@ -1162,21 +1160,17 @@ class NominalClient:
         )
         return list(self._iter_search_assets(query))

-    def
-        if asset is None:
-            return list_streaming_checklists_paginated(self._clients.checklist_execution, self._clients.auth_header)
-        return list_streaming_checklists_for_asset_paginated(
-            self._clients.checklist_execution, self._clients.auth_header, asset
-        )
-
-    def list_streaming_checklists(self, asset: Asset | str | None = None) -> Iterable[str]:
+    def list_streaming_checklists(self, asset: Asset | str | None = None) -> Sequence[str]:
         """List all Streaming Checklists.

         Args:
             asset: if provided, only return checklists associated with the given asset.
+
+        Returns:
+            All streaming checklist RIDs that match the provided conditions
         """
-
-        return list(self.
+        asset_rid = None if asset is None else rid_from_instance_or_string(asset)
+        return list(_iter_list_streaming_checklists(self._clients, asset_rid))

     def data_review_builder(self) -> DataReviewBuilder:
         return DataReviewBuilder([], [], [], _clients=self._clients)
@@ -1220,27 +1214,29 @@ class NominalClient:
         responses = self._clients.event.batch_get_events(self._clients.auth_header, list(rids))
         return [Event._from_conjure(self._clients, response) for response in responses]

-    def _iter_search_data_reviews(
-        self,
-        assets: Sequence[Asset | str] | None = None,
-        runs: Sequence[Run | str] | None = None,
-    ) -> Iterable[DataReview]:
-        for review in search_data_reviews_paginated(
-            self._clients.datareview,
-            self._clients.auth_header,
-            assets=[rid_from_instance_or_string(asset) for asset in assets] if assets else None,
-            runs=[rid_from_instance_or_string(run) for run in runs] if runs else None,
-        ):
-            yield DataReview._from_conjure(self._clients, review)
-
     def search_data_reviews(
         self,
         assets: Sequence[Asset | str] | None = None,
         runs: Sequence[Run | str] | None = None,
     ) -> Sequence[DataReview]:
-        """Search for
+        """Search for data reviews meeting the specified filters.
+        Filters are ANDed together, e.g. `(data_review.asset == asset) AND (data_review.run == run)`
+
+        Args:
+            assets: List of assets that must be associated with a data review to be included.
+            runs: List of runs that must be associated with a data review to be included.
+
+        Returns:
+            All data reviews which match all of the provided conditions
+        """
         # TODO (drake-nominal): Expose checklist_refs to users
-        return list(
+        return list(
+            _iter_search_data_reviews(
+                clients=self._clients,
+                assets=[rid_from_instance_or_string(asset) for asset in assets] if assets else None,
+                runs=[rid_from_instance_or_string(run) for run in runs] if runs else None,
+            )
+        )

     def search_events(
         self,
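On the client side, both entry points now delegate to module-level iterators (`_iter_search_data_reviews` and `_iter_list_streaming_checklists`) instead of private client methods, and `list_streaming_checklists` returns a concrete `Sequence`. A hypothetical usage sketch, assuming `client` is a `NominalClient` and `asset`/`run` are objects or RID strings:

```python
# Filters are ANDed: returned reviews must be associated with both the asset and the run.
reviews = client.search_data_reviews(assets=[asset], runs=[run])

# Streaming checklist RIDs for one asset, or call with no argument for all of them.
checklist_rids = client.list_streaming_checklists(asset)
```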
nominal/core/data_review.py
CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
 from dataclasses import dataclass, field
 from datetime import timedelta
 from time import sleep
-from typing import Protocol, Sequence
+from typing import TYPE_CHECKING, Iterable, Protocol, Sequence

 from nominal_api import (
     event as event_api,
@@ -18,14 +18,19 @@ from nominal_api import (
 )
 from typing_extensions import Self, deprecated

-from nominal.core import
+from nominal.core._checklist_types import Priority, _conjure_priority_to_priority
 from nominal.core._clientsbunch import HasScoutParams
 from nominal.core._utils.api_tools import HasRid, rid_from_instance_or_string
-from nominal.core.
+from nominal.core._utils.pagination_tools import search_data_reviews_paginated
+from nominal.core.event import Event
 from nominal.core.exceptions import NominalMethodRemovedError
-from nominal.core.run import Run
 from nominal.ts import IntegralNanosecondsUTC, _SecondsNanos

+if TYPE_CHECKING:
+    from nominal.core.asset import Asset
+    from nominal.core.checklist import Checklist
+    from nominal.core.run import Run
+

 @dataclass(frozen=True)
 class DataReview(HasRid):
@@ -66,8 +71,10 @@ class DataReview(HasRid):
             _clients=clients,
         )

-    def get_checklist(self) ->
-
+    def get_checklist(self) -> "Checklist":
+        from nominal.core.checklist import Checklist
+
+        return Checklist._from_conjure(
             self._clients,
             self._clients.checklist.get(self._clients.auth_header, self.checklist_rid, commit=self.checklist_commit),
         )
@@ -83,7 +90,7 @@ class DataReview(HasRid):
         "use 'nominal.core.DataReview.get_events()' instead",
     )

-    def get_events(self) -> Sequence[
+    def get_events(self) -> Sequence[Event]:
         """Retrieves the list of events for the data review."""
         data_review_response = self._clients.datareview.get(self._clients.auth_header, self.rid).check_evaluations
         all_event_rids = [
@@ -93,7 +100,7 @@ class DataReview(HasRid):
             for event_rid in check.state._generated_alerts.event_rids
         ]
         event_response = self._clients.event.batch_get_events(self._clients.auth_header, all_event_rids)
-        return [
+        return [Event._from_conjure(self._clients, data_review_event) for data_review_event in event_response]

     def reload(self) -> DataReview:
         """Reloads the data review from the server."""
@@ -137,7 +144,7 @@ class CheckViolation:
     name: str
     start: IntegralNanosecondsUTC
     end: IntegralNanosecondsUTC | None
-    priority:
+    priority: Priority | None

     @classmethod
     def _from_conjure(cls, check_alert: scout_datareview_api.CheckAlert) -> CheckViolation:
@@ -147,7 +154,7 @@ class CheckViolation:
             name=check_alert.name,
             start=_SecondsNanos.from_api(check_alert.start).to_nanoseconds(),
             end=_SecondsNanos.from_api(check_alert.end).to_nanoseconds() if check_alert.end is not None else None,
-            priority=
+            priority=_conjure_priority_to_priority(check_alert.priority)
             if check_alert.priority is not scout_api.Priority.UNKNOWN
             else None,
         )
@@ -192,7 +199,7 @@ class DataReviewBuilder:
     def execute_checklist(
         self,
         run: str | Run,
-        checklist: str |
+        checklist: str | Checklist,
         *,
         commit: str | None = None,
         asset: str | Asset | None = None,
@@ -273,3 +280,17 @@ def poll_until_completed(
     data_reviews: Sequence[DataReview], interval: timedelta = timedelta(seconds=2)
 ) -> Sequence[DataReview]:
     return [review.poll_for_completion(interval) for review in data_reviews]
+
+
+def _iter_search_data_reviews(
+    clients: DataReview._Clients,
+    assets: Sequence[str] | None = None,
+    runs: Sequence[str] | None = None,
+) -> Iterable[DataReview]:
+    for review in search_data_reviews_paginated(
+        clients.datareview,
+        clients.auth_header,
+        assets=assets,
+        runs=runs,
+    ):
+        yield DataReview._from_conjure(clients, review)