nominal 1.100.0__py3-none-any.whl → 1.102.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +32 -0
- nominal/core/__init__.py +3 -1
- nominal/core/_event_types.py +100 -0
- nominal/core/_utils/query_tools.py +50 -47
- nominal/core/asset.py +174 -45
- nominal/core/client.py +127 -95
- nominal/core/dataset.py +284 -1
- nominal/core/datasource.py +6 -1
- nominal/core/event.py +73 -34
- nominal/core/filetype.py +2 -1
- nominal/core/run.py +85 -4
- nominal/experimental/migration/migration_utils.py +107 -8
- nominal/experimental/rust_streaming/rust_write_stream.py +1 -3
- {nominal-1.100.0.dist-info → nominal-1.102.0.dist-info}/METADATA +2 -2
- {nominal-1.100.0.dist-info → nominal-1.102.0.dist-info}/RECORD +18 -17
- {nominal-1.100.0.dist-info → nominal-1.102.0.dist-info}/WHEEL +0 -0
- {nominal-1.100.0.dist-info → nominal-1.102.0.dist-info}/entry_points.txt +0 -0
- {nominal-1.100.0.dist-info → nominal-1.102.0.dist-info}/licenses/LICENSE +0 -0
CHANGELOG.md
CHANGED
@@ -1,5 +1,37 @@
  # Changelog

+ ## [1.102.0](https://github.com/nominal-io/nominal-client/compare/v1.101.0...v1.102.0) (2026-01-05)
+
+
+ ### Features
+
+ * add run creation on an asset ([#558](https://github.com/nominal-io/nominal-client/issues/558)) ([9c0718a](https://github.com/nominal-io/nominal-client/commit/9c0718ae2706d4192087fb787e99f76d702a18c9))
+ * add util to search events by origin type and add clone/copy events ([#555](https://github.com/nominal-io/nominal-client/issues/555)) ([bc427d6](https://github.com/nominal-io/nominal-client/commit/bc427d6b97a777e6802dc8139c8d389582558a77))
+ * allow unarchiving a run ([#560](https://github.com/nominal-io/nominal-client/issues/560)) ([0221a56](https://github.com/nominal-io/nominal-client/commit/0221a56c73d133581567cc5183fda237607d79b5))
+ * clean up and deprecate old video creation methods in NominalClient ([#561](https://github.com/nominal-io/nominal-client/issues/561)) ([e61919c](https://github.com/nominal-io/nominal-client/commit/e61919ca338a5e0de818471431d750246d87977c))
+
+
+ ### Bug Fixes
+
+ * update how workspaces are selected in migration_utils ([#559](https://github.com/nominal-io/nominal-client/issues/559)) ([4379121](https://github.com/nominal-io/nominal-client/commit/4379121f44411eed19248ee16540672bbed743a0))
+
+ ## [1.101.0](https://github.com/nominal-io/nominal-client/compare/v1.100.0...v1.101.0) (2025-12-23)
+
+
+ ### Features
+
+ * add search_events to asset ([#553](https://github.com/nominal-io/nominal-client/issues/553)) ([3d291b7](https://github.com/nominal-io/nominal-client/commit/3d291b7e4b851676de882ac88d7695e49e9da0d3))
+ * added avi video file type ([#552](https://github.com/nominal-io/nominal-client/issues/552)) ([84bd35f](https://github.com/nominal-io/nominal-client/commit/84bd35ff83aa87cf8f7a718b5d71c1ca1445e9e9))
+ * allow adding data directly to runs, assets ([#543](https://github.com/nominal-io/nominal-client/issues/543)) ([6630717](https://github.com/nominal-io/nominal-client/commit/6630717827a35d50ee6008ede14b9c8e355f239c))
+ * allow creating events on runs, assets, _create_event helper method ([#540](https://github.com/nominal-io/nominal-client/issues/540)) ([dc84028](https://github.com/nominal-io/nominal-client/commit/dc84028d78df256f50ba58879416bb3b5f8752ed))
+ * allow creating events on runs, assets, use helper method ([dc84028](https://github.com/nominal-io/nominal-client/commit/dc84028d78df256f50ba58879416bb3b5f8752ed))
+ * reusable helper method for creating runs, create multi-asset runs in client ([#539](https://github.com/nominal-io/nominal-client/issues/539)) ([3118b43](https://github.com/nominal-io/nominal-client/commit/3118b43be4df552eb7418ffed08ed0afafbe88f4))
+
+
+ ### Bug Fixes
+
+ * make rust streaming optional unless supported architecture ([#556](https://github.com/nominal-io/nominal-client/issues/556)) ([24a2b98](https://github.com/nominal-io/nominal-client/commit/24a2b98218d025affb171411a72ad80b2dd2dd87))
+
  ## [1.100.0](https://github.com/nominal-io/nominal-client/compare/v1.99.0...v1.100.0) (2025-12-19)

nominal/core/__init__.py
CHANGED
@@ -1,3 +1,4 @@
+ from nominal.core._event_types import EventType, SearchEventOriginType
  from nominal.core._stream.write_stream import WriteStream
  from nominal.core._utils.api_tools import LinkDict
  from nominal.core.asset import Asset
@@ -19,7 +20,7 @@ from nominal.core.data_review import CheckViolation, DataReview, DataReviewBuild
  from nominal.core.dataset import Dataset, poll_until_ingestion_completed
  from nominal.core.dataset_file import DatasetFile, IngestWaitType, as_files_ingested, wait_for_files_to_ingest
  from nominal.core.datasource import DataSource
- from nominal.core.event import Event
+ from nominal.core.event import Event
  from nominal.core.filetype import FileType, FileTypes
  from nominal.core.log import LogPoint
  from nominal.core.run import Run
@@ -60,6 +61,7 @@ __all__ = [
      "NominalClient",
      "poll_until_ingestion_completed",
      "Run",
+     "SearchEventOriginType",
      "Secret",
      "TagDetails",
      "TimestampMetadata",
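
After this change both helper types are re-exported from the public `nominal.core` namespace. A minimal import sketch (assuming only that the package is installed; nothing here needs a live Nominal connection):

```python
# Both names resolve via nominal/core/__init__.py after this release.
from nominal.core import EventType, SearchEventOriginType

severity = EventType.FLAG                 # SDK-side enum: INFO, FLAG, ERROR, SUCCESS, UNKNOWN
print(severity.value)                     # "FLAG"
print(SearchEventOriginType._fields)      # ("name", "creation_type") -- it is a NamedTuple
```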

nominal/core/_event_types.py
ADDED

@@ -0,0 +1,100 @@
+ from __future__ import annotations
+
+ from enum import Enum
+ from typing import Iterable, NamedTuple
+
+ from nominal_api import event
+
+
+ class EventType(Enum):
+     INFO = "INFO"
+     FLAG = "FLAG"
+     ERROR = "ERROR"
+     SUCCESS = "SUCCESS"
+     UNKNOWN = "UNKNOWN"
+
+     @classmethod
+     def from_api_event_type(cls, event: event.EventType) -> EventType:
+         if event.name == "INFO":
+             return cls.INFO
+         elif event.name == "FLAG":
+             return cls.FLAG
+         elif event.name == "ERROR":
+             return cls.ERROR
+         elif event.name == "SUCCESS":
+             return cls.SUCCESS
+         else:
+             return cls.UNKNOWN
+
+     def _to_api_event_type(self) -> event.EventType:
+         if self.name == "INFO":
+             return event.EventType.INFO
+         elif self.name == "FLAG":
+             return event.EventType.FLAG
+         elif self.name == "ERROR":
+             return event.EventType.ERROR
+         elif self.name == "SUCCESS":
+             return event.EventType.SUCCESS
+         else:
+             return event.EventType.UNKNOWN
+
+
+ class EventCreationType(Enum):
+     MANUAL = "MANUAL"
+     BY_EXTERNAL_RESOURCE = "BY_EXTERNAL_RESOURCE"
+
+
+ class SearchEventOriginType(NamedTuple):
+     name: str
+     creation_type: EventCreationType
+
+     @classmethod
+     def from_api_origin_type(cls, event: event.SearchEventOriginType) -> SearchEventOriginType:
+         if event.name == "WORKBOOK":
+             return SearchEventOriginTypes.WORKBOOK
+         elif event.name == "TEMPLATE":
+             return SearchEventOriginTypes.TEMPLATE
+         elif event.name == "API":
+             return SearchEventOriginTypes.API
+         elif event.name == "DATA_REVIEW":
+             return SearchEventOriginTypes.DATA_REVIEW
+         elif event.name == "PROCEDURE":
+             return SearchEventOriginTypes.PROCEDURE
+         elif event.name == "STREAMING_CHECKLIST":
+             return SearchEventOriginTypes.STREAMING_CHECKLIST
+         else:
+             raise ValueError(f"Unexpected Event Origin {event.name}")
+
+     def _to_api_search_event_origin_type(self) -> event.SearchEventOriginType:
+         if self.name == "WORKBOOK":
+             return event.SearchEventOriginType.WORKBOOK
+         elif self.name == "TEMPLATE":
+             return event.SearchEventOriginType.TEMPLATE
+         elif self.name == "API":
+             return event.SearchEventOriginType.API
+         elif self.name == "DATA_REVIEW":
+             return event.SearchEventOriginType.DATA_REVIEW
+         elif self.name == "PROCEDURE":
+             return event.SearchEventOriginType.PROCEDURE
+         elif self.name == "STREAMING_CHECKLIST":
+             return event.SearchEventOriginType.STREAMING_CHECKLIST
+         else:
+             raise ValueError(f"Unexpected Event Origin {self.name}")
+
+     @classmethod
+     def get_manual_origin_types(cls) -> Iterable[SearchEventOriginType]:
+         """Return all origin types that are manually created."""
+         return [
+             origin_type
+             for origin_type in SearchEventOriginTypes.__dict__.values()
+             if isinstance(origin_type, SearchEventOriginType) and origin_type.creation_type == EventCreationType.MANUAL
+         ]
+
+
+ class SearchEventOriginTypes:
+     WORKBOOK = SearchEventOriginType("WORKBOOK", EventCreationType.MANUAL)
+     TEMPLATE = SearchEventOriginType("TEMPLATE", EventCreationType.MANUAL)
+     API = SearchEventOriginType("API", EventCreationType.MANUAL)
+     DATA_REVIEW = SearchEventOriginType("DATA_REVIEW", EventCreationType.BY_EXTERNAL_RESOURCE)
+     PROCEDURE = SearchEventOriginType("PROCEDURE", EventCreationType.BY_EXTERNAL_RESOURCE)
+     STREAMING_CHECKLIST = SearchEventOriginType("STREAMING_CHECKLIST", EventCreationType.BY_EXTERNAL_RESOURCE)
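
The new module pairs each searchable origin with how it gets created. A short sketch of how the helpers compose, using only names defined in the file above (the round-trip through `nominal_api.event` assumes those enum members exist, as the module itself implies):

```python
from nominal.core._event_types import (
    EventCreationType,
    SearchEventOriginType,
    SearchEventOriginTypes,
)

# Round-trip a known origin through the conjure enum and back.
origin = SearchEventOriginTypes.DATA_REVIEW
assert origin.creation_type is EventCreationType.BY_EXTERNAL_RESOURCE
api_origin = origin._to_api_search_event_origin_type()
assert SearchEventOriginType.from_api_origin_type(api_origin) == origin

# Origins a user can create directly: WORKBOOK, TEMPLATE, API.
manual = list(SearchEventOriginType.get_manual_origin_types())
print([o.name for o in manual])
```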

nominal/core/_utils/query_tools.py
CHANGED

@@ -19,7 +19,6 @@ from nominal_api import (
      secrets_api,
  )

- from nominal.core.event import EventType
  from nominal.ts import IntegralNanosecondsUTC, _SecondsNanos


@@ -198,52 +197,6 @@ def create_search_datasets_query(
      return scout_catalog.SearchDatasetsQuery(and_=queries)


- def create_search_events_query(  # noqa: PLR0912
-     search_text: str | None = None,
-     after: str | datetime | IntegralNanosecondsUTC | None = None,
-     before: str | datetime | IntegralNanosecondsUTC | None = None,
-     assets: Iterable[str] | None = None,
-     labels: Iterable[str] | None = None,
-     properties: Mapping[str, str] | None = None,
-     created_by: str | None = None,
-     workbook: str | None = None,
-     data_review: str | None = None,
-     assignee: str | None = None,
-     event_type: EventType | None = None,
-     workspace_rid: str | None = None,
- ) -> event.SearchQuery:
-     queries = []
-     if search_text is not None:
-         queries.append(event.SearchQuery(search_text=search_text))
-     if after is not None:
-         queries.append(event.SearchQuery(after=_SecondsNanos.from_flexible(after).to_api()))
-     if before is not None:
-         queries.append(event.SearchQuery(before=_SecondsNanos.from_flexible(before).to_api()))
-     if assets:
-         for asset in assets:
-             queries.append(event.SearchQuery(asset=asset))
-     if labels:
-         for label in labels:
-             queries.append(event.SearchQuery(label=label))
-     if properties:
-         for name, value in properties.items():
-             queries.append(event.SearchQuery(property=api.Property(name=name, value=value)))
-     if created_by:
-         queries.append(event.SearchQuery(created_by=created_by))
-     if workbook is not None:
-         queries.append(event.SearchQuery(workbook=workbook))
-     if data_review is not None:
-         queries.append(event.SearchQuery(data_review=data_review))
-     if assignee is not None:
-         queries.append(event.SearchQuery(assignee=assignee))
-     if event_type is not None:
-         queries.append(event.SearchQuery(event_type=event_type._to_api_event_type()))
-     if workspace_rid is not None:
-         queries.append(event.SearchQuery(workspace=workspace_rid))
-
-     return event.SearchQuery(and_=queries)
-
-
  def create_search_runs_query(
      start: str | datetime | IntegralNanosecondsUTC | None = None,
      end: str | datetime | IntegralNanosecondsUTC | None = None,
@@ -398,3 +351,53 @@ def create_search_workbook_templates_query(
          queries.append(scout_template_api.SearchTemplatesQuery(is_published=published))

      return scout_template_api.SearchTemplatesQuery(and_=queries)
+
+
+ def _create_search_events_query(  # noqa: PLR0912
+     search_text: str | None = None,
+     after: str | datetime | IntegralNanosecondsUTC | None = None,
+     before: str | datetime | IntegralNanosecondsUTC | None = None,
+     asset_rids: Iterable[str] | None = None,
+     labels: Iterable[str] | None = None,
+     properties: Mapping[str, str] | None = None,
+     created_by_rid: str | None = None,
+     workbook_rid: str | None = None,
+     data_review_rid: str | None = None,
+     assignee_rid: str | None = None,
+     event_type: event.EventType | None = None,
+     origin_types: Iterable[event.SearchEventOriginType] | None = None,
+     workspace_rid: str | None = None,
+ ) -> event.SearchQuery:
+     queries = []
+     if search_text is not None:
+         queries.append(event.SearchQuery(search_text=search_text))
+     if after is not None:
+         queries.append(event.SearchQuery(after=_SecondsNanos.from_flexible(after).to_api()))
+     if before is not None:
+         queries.append(event.SearchQuery(before=_SecondsNanos.from_flexible(before).to_api()))
+     if asset_rids:
+         for asset in asset_rids:
+             queries.append(event.SearchQuery(asset=asset))
+     if labels:
+         for label in labels:
+             queries.append(event.SearchQuery(label=label))
+     if properties:
+         for name, value in properties.items():
+             queries.append(event.SearchQuery(property=api.Property(name=name, value=value)))
+     if created_by_rid:
+         queries.append(event.SearchQuery(created_by=created_by_rid))
+     if workbook_rid is not None:
+         queries.append(event.SearchQuery(workbook=workbook_rid))
+     if data_review_rid is not None:
+         queries.append(event.SearchQuery(data_review=data_review_rid))
+     if assignee_rid is not None:
+         queries.append(event.SearchQuery(assignee=assignee_rid))
+     if event_type is not None:
+         queries.append(event.SearchQuery(event_type=event_type))
+     if origin_types is not None:
+         origin_type_filter = event.OriginTypesFilter(api.SetOperator.OR, list(origin_types))
+         queries.append(event.SearchQuery(origin_types=origin_type_filter))
+     if workspace_rid is not None:
+         queries.append(event.SearchQuery(workspace=workspace_rid))
+
+     return event.SearchQuery(and_=queries)
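
For orientation, a rough sketch of how the renamed private helper composes an AND-of-filters `event.SearchQuery`. The asset RID below is a placeholder and the public wrapper (`_search_events` in `nominal/core/event.py`) is not part of this excerpt, so treat this as illustrative only:

```python
from nominal.core._event_types import EventType, SearchEventOriginType
from nominal.core._utils.query_tools import _create_search_events_query

# Events on one asset, at ERROR verbosity, restricted to manually created origins.
query = _create_search_events_query(
    asset_rids=["asset-rid-placeholder"],  # placeholder RID, not a real resource
    labels=["flight"],
    event_type=EventType.ERROR._to_api_event_type(),
    origin_types=[
        o._to_api_search_event_origin_type()
        for o in SearchEventOriginType.get_manual_origin_types()
    ],
)
```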
nominal/core/asset.py
CHANGED

@@ -1,11 +1,13 @@
  from __future__ import annotations

+ import datetime
  import logging
  from dataclasses import dataclass, field
  from types import MappingProxyType
- from typing import Iterable, Literal, Mapping, Protocol, Sequence, TypeAlias
+ from typing import Iterable, Literal, Mapping, Protocol, Sequence, TypeAlias

  from nominal_api import (
+     event,
      scout,
      scout_asset_api,
      scout_assets,
@@ -14,22 +16,46 @@ from nominal_api import (
  from typing_extensions import Self

  from nominal.core._clientsbunch import HasScoutParams
- from nominal.core.
+ from nominal.core._event_types import EventType, SearchEventOriginType
+ from nominal.core._utils.api_tools import (
+     HasRid,
+     Link,
+     LinkDict,
+     RefreshableMixin,
+     create_links,
+     rid_from_instance_or_string,
+ )
  from nominal.core._utils.pagination_tools import search_runs_by_asset_paginated
  from nominal.core.attachment import Attachment, _iter_get_attachments
  from nominal.core.connection import Connection, _get_connections
- from nominal.core.dataset import Dataset, _create_dataset, _get_datasets
+ from nominal.core.dataset import Dataset, _create_dataset, _DatasetWrapper, _get_datasets
  from nominal.core.datasource import DataSource
+ from nominal.core.event import Event, _create_event, _search_events
  from nominal.core.video import Video, _create_video, _get_video
- from nominal.ts import IntegralNanosecondsUTC, _SecondsNanos
+ from nominal.ts import IntegralNanosecondsDuration, IntegralNanosecondsUTC, _SecondsNanos

  ScopeType: TypeAlias = Connection | Dataset | Video
+ ScopeTypeSpecifier: TypeAlias = Literal["connection", "dataset", "video"]

  logger = logging.getLogger(__name__)


+ def _filter_scopes(
+     scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier
+ ) -> Sequence[scout_asset_api.DataScope]:
+     return [scope for scope in scopes if scope.data_source.type.lower() == scope_type]
+
+
+ def _filter_scope_rids(
+     scopes: Sequence[scout_asset_api.DataScope], scope_type: ScopeTypeSpecifier
+ ) -> Mapping[str, str]:
+     return {
+         scope.data_scope_name: getattr(scope.data_source, scope_type) for scope in _filter_scopes(scopes, scope_type)
+     }
+
+
  @dataclass(frozen=True)
- class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
+ class Asset(_DatasetWrapper, HasRid, RefreshableMixin[scout_asset_api.Asset]):
      rid: str
      name: str
      description: str | None
@@ -43,6 +69,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          DataSource._Clients,
          Video._Clients,
          Attachment._Clients,
+         Event._Clients,
          HasScoutParams,
          Protocol,
      ):
@@ -50,6 +77,8 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          def assets(self) -> scout_assets.AssetService: ...
          @property
          def run(self) -> scout.RunService: ...
+         @property
+         def event(self) -> event.EventService: ...

      @property
      def nominal_url(self) -> str:
@@ -64,6 +93,13 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
              raise ValueError(f"multiple assets found with RID {self.rid!r}: {response!r}")
          return response[self.rid]

+     def _list_dataset_scopes(self) -> Sequence[scout_asset_api.DataScope]:
+         return _filter_scopes(self._get_latest_api().data_scopes, "dataset")
+
+     def _scope_rids(self, scope_type: ScopeTypeSpecifier) -> Mapping[str, str]:
+         asset = self._get_latest_api()
+         return _filter_scope_rids(asset.data_scopes, scope_type)
+
      def update(
          self,
          *,
@@ -97,14 +133,6 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          api_asset = self._clients.assets.update_asset(self._clients.auth_header, request, self.rid)
          return self._refresh_from_api(api_asset)

-     def _scope_rid(self, stype: Literal["dataset", "video", "connection"]) -> dict[str, str]:
-         asset = self._get_latest_api()
-         return {
-             scope.data_scope_name: cast(str, getattr(scope.data_source, stype))
-             for scope in asset.data_scopes
-             if scope.data_source.type.lower() == stype
-         }
-
      def promote(self) -> Self:
          """Promote this asset to be a standard, searchable, and displayable asset.

@@ -137,23 +165,25 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          """
          return (*self.list_datasets(), *self.list_connections(), *self.list_videos())

-     def
+     def remove_data_scopes(
          self,
          *,
-
-
+         names: Sequence[str] | None = None,
+         scopes: Sequence[ScopeType | str] | None = None,
      ) -> None:
-
-         data_sources = data_sources or []
-
-         if isinstance(data_sources, str):
-             raise RuntimeError("Expect `data_sources` to be a sequence, not a string")
+         """Remove data scopes from this asset.

-
+         Args:
+             names: Names of datascopes to remove
+             scopes: Rids or instances of scope types (dataset, video, connection) to remove.
+         """
+         scope_names_to_remove = names or []
+         data_scopes_to_remove = scopes or []

+         scope_rids_to_remove = {rid_from_instance_or_string(ds) for ds in data_scopes_to_remove}
          conjure_asset = self._get_latest_api()

-
+         data_scopes_to_keep = [
              scout_asset_api.CreateAssetDataScope(
                  data_scope_name=ds.data_scope_name,
                  data_source=ds.data_source,
@@ -161,31 +191,21 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
                  offset=ds.offset,
              )
              for ds in conjure_asset.data_scopes
-             if ds.data_scope_name not in
-             and (
+             if ds.data_scope_name not in scope_names_to_remove
+             and all(
+                 rid not in scope_rids_to_remove
+                 for rid in (ds.data_source.dataset, ds.data_source.connection, ds.data_source.video)
+             )
          ]

-
+         updated_asset = self._clients.assets.update_asset(
              self._clients.auth_header,
              scout_asset_api.UpdateAssetRequest(
-                 data_scopes=
+                 data_scopes=data_scopes_to_keep,
              ),
              self.rid,
          )
-         self._refresh_from_api(
-
-     def remove_data_scopes(
-         self,
-         *,
-         names: Sequence[str] | None = None,
-         scopes: Sequence[ScopeType | str] | None = None,
-     ) -> None:
-         """Remove data scopes from this asset.
-
-         `names` are scope names.
-         `scopes` are rids or scope objects.
-         """
-         self._remove_data_sources(data_scope_names=names, data_sources=scopes)
+         self._refresh_from_api(updated_asset)

      def add_dataset(
          self,
@@ -329,6 +349,83 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          self.add_video(data_scope_name, video)
          return video

+     def create_event(
+         self,
+         name: str,
+         type: EventType,
+         start: datetime.datetime | IntegralNanosecondsUTC,
+         duration: datetime.timedelta | IntegralNanosecondsDuration = 0,
+         *,
+         description: str | None = None,
+         properties: Mapping[str, str] | None = None,
+         labels: Sequence[str] | None = None,
+     ) -> Event:
+         """Create an event associated with this Asset at a given point in time.
+
+         Args:
+             name: Name of the event
+             type: Verbosity level of the event.
+             start: Starting timestamp of the event
+             duration: Duration of the event, or 0 for an event without duration.
+             description: Optionally, a human readable description of the event to create
+             properties: Key-value pairs to use as properties on the created event
+             labels: Sequence of labels to use on the created event.
+
+         Returns:
+             The created event that is associated with the asset.
+         """
+         return _create_event(
+             self._clients,
+             name=name,
+             type=type,
+             start=start,
+             duration=duration,
+             description=description,
+             assets=[self],
+             properties=properties,
+             labels=labels,
+         )
+
+     def create_run(
+         self,
+         name: str,
+         start: datetime.datetime | IntegralNanosecondsUTC,
+         end: datetime.datetime | IntegralNanosecondsUTC | None,
+         *,
+         description: str | None = None,
+         properties: Mapping[str, str] | None = None,
+         labels: Sequence[str] = (),
+         links: Sequence[str | Link | LinkDict] = (),
+         attachments: Iterable[Attachment] | Iterable[str] = (),
+     ) -> Run:
+         """Create a run associated with this Asset for a given span of time.
+
+         Args:
+             name: Name of the run.
+             start: Starting timestamp of the run.
+             end: Ending timestamp of the run, or None for an unbounded run.
+             description: Optionally, a human readable description of the run to create.
+             properties: Key-value pairs to use as properties on the created run.
+             labels: Sequence of labels to use on the created run.
+             links: Link metadata to add to the created run.
+             attachments: Attachments to associate with the created run.
+
+         Returns:
+             Returns the created run
+         """
+         return _create_run(
+             self._clients,
+             name=name,
+             start=start,
+             end=end,
+             description=description,
+             properties=properties,
+             labels=labels,
+             links=links,
+             attachments=attachments,
+             asset_rids=[self.rid],
+         )
+
      def get_dataset(self, data_scope_name: str) -> Dataset:
          """Retrieve a dataset by data scope name, or raise ValueError if one is not found."""
          dataset = self.get_data_scope(data_scope_name)
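
The two methods above route through the shared `_create_event` / `_create_run` helpers with this asset pre-attached. A usage sketch under stated assumptions (an already-fetched `Asset`; the event/run names, labels, and timestamps are illustrative):

```python
import datetime

from nominal.core import EventType
from nominal.core.asset import Asset


def flag_overtemp_and_start_run(asset: Asset) -> None:
    """Sketch of the Asset-level creation helpers added in this release."""
    start = datetime.datetime(2026, 1, 5, 12, 0, tzinfo=datetime.timezone.utc)

    # create_event forwards assets=[self], so the event is attached to this asset.
    asset.create_event(
        "engine overtemp",
        EventType.FLAG,
        start=start,
        duration=datetime.timedelta(seconds=30),
        labels=["thermal"],
    )

    # create_run forwards asset_rids=[asset.rid]; end=None creates an unbounded run.
    asset.create_run(
        "test flight 12",
        start=start,
        end=None,
        description="run created directly on the asset",
    )
```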
@@ -357,7 +454,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          """List the datasets associated with this asset.
          Returns (data_scope_name, dataset) pairs for each dataset.
          """
-         scope_rid = self.
+         scope_rid = self._scope_rids(scope_type="dataset")
          if not scope_rid:
              return []

@@ -375,7 +472,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          """List the connections associated with this asset.
          Returns (data_scope_name, connection) pairs for each connection.
          """
-         scope_rid = self.
+         scope_rid = self._scope_rids(scope_type="connection")
          connections_meta = _get_connections(self._clients, list(scope_rid.values()))
          return [
              (scope, Connection._from_conjure(self._clients, connection))
@@ -386,7 +483,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
          """List the videos associated with this asset.
          Returns (data_scope_name, dataset) pairs for each video.
          """
-         scope_rid = self.
+         scope_rid = self._scope_rids(scope_type="video")
          return [
              (scope, Video._from_conjure(self._clients, _get_video(self._clients, rid)))
              for (scope, rid) in scope_rid.items()
@@ -411,6 +508,38 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
              )
          ]

+     def search_events(
+         self,
+         *,
+         search_text: str | None = None,
+         after: str | datetime.datetime | IntegralNanosecondsUTC | None = None,
+         before: str | datetime.datetime | IntegralNanosecondsUTC | None = None,
+         labels: Iterable[str] | None = None,
+         properties: Mapping[str, str] | None = None,
+         created_by_rid: str | None = None,
+         workbook_rid: str | None = None,
+         data_review_rid: str | None = None,
+         assignee_rid: str | None = None,
+         event_type: EventType | None = None,
+         origin_types: Iterable[SearchEventOriginType] | None = None,
+     ) -> Sequence[Event]:
+         """Search for events associated with this Asset. See nominal.core.event._search_events for details."""
+         return _search_events(
+             self._clients,
+             search_text=search_text,
+             after=after,
+             before=before,
+             asset_rids=[self.rid],
+             labels=labels,
+             properties=properties,
+             created_by_rid=created_by_rid,
+             workbook_rid=workbook_rid,
+             data_review_rid=data_review_rid,
+             assignee_rid=assignee_rid,
+             event_type=event_type,
+             origin_types=origin_types,
+         )
+
      def remove_attachments(self, attachments: Iterable[Attachment] | Iterable[str]) -> None:
          """Remove attachments from this asset.
          Does not remove the attachments from Nominal.
@@ -445,4 +574,4 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):


  # Moving to bottom to deal with circular dependencies
- from nominal.core.run import Run  # noqa: E402
+ from nominal.core.run import Run, _create_run  # noqa: E402