nominal 1.98.0__py3-none-any.whl → 1.100.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +26 -0
- nominal/core/_utils/multipart.py +8 -3
- nominal/core/_utils/multipart_downloader.py +2 -16
- nominal/core/_utils/networking.py +72 -7
- nominal/core/asset.py +43 -0
- nominal/core/client.py +4 -0
- nominal/core/containerized_extractors.py +22 -3
- nominal/core/dataset.py +121 -2
- nominal/core/filetype.py +1 -0
- nominal/core/run.py +6 -8
- nominal/experimental/migration/__init__.py +19 -0
- nominal/experimental/migration/migration_utils.py +552 -0
- {nominal-1.98.0.dist-info → nominal-1.100.0.dist-info}/METADATA +3 -2
- {nominal-1.98.0.dist-info → nominal-1.100.0.dist-info}/RECORD +17 -15
- {nominal-1.98.0.dist-info → nominal-1.100.0.dist-info}/WHEEL +0 -0
- {nominal-1.98.0.dist-info → nominal-1.100.0.dist-info}/entry_points.txt +0 -0
- {nominal-1.98.0.dist-info → nominal-1.100.0.dist-info}/licenses/LICENSE +0 -0
CHANGELOG.md
CHANGED
@@ -1,5 +1,31 @@
 # Changelog
 
+## [1.100.0](https://github.com/nominal-io/nominal-client/compare/v1.99.0...v1.100.0) (2025-12-19)
+
+
+### Features
+
+* add args and timestamp types for containerized ingest ([#551](https://github.com/nominal-io/nominal-client/issues/551)) ([1dd259a](https://github.com/nominal-io/nominal-client/commit/1dd259a299d83c55b482f443542f7feb63896795))
+* add clone/copy_from methods for assets, datasets, templates ([#548](https://github.com/nominal-io/nominal-client/issues/548)) ([f04c468](https://github.com/nominal-io/nominal-client/commit/f04c46800569da0ad6fb9440a336a7d8cac3542a))
+* added prefix_tree_delimiter parameter to get_or_create_dataset method ([#550](https://github.com/nominal-io/nominal-client/issues/550)) ([c204d94](https://github.com/nominal-io/nominal-client/commit/c204d9450d33a9742c0ae6b6a2d78e8baa796d2c))
+* allow listing runs on asset, deprecate search run by asset in client ([#541](https://github.com/nominal-io/nominal-client/issues/541)) ([35464e5](https://github.com/nominal-io/nominal-client/commit/35464e56ebc81577094b4ce0862d830e7d7bc92e))
+* allow promoting assets ([#542](https://github.com/nominal-io/nominal-client/issues/542)) ([1ce1082](https://github.com/nominal-io/nominal-client/commit/1ce1082dd025371bed4920c0e10d5ec93ac37687))
+* allow using truststore for ssl bypass ([#472](https://github.com/nominal-io/nominal-client/issues/472)) ([55a43c2](https://github.com/nominal-io/nominal-client/commit/55a43c23cb5a32b6cf90b2d50c5f0e79eeafaca7))
+* create new clone workbook and associated helpers in experimental ([#546](https://github.com/nominal-io/nominal-client/issues/546)) ([aeffb44](https://github.com/nominal-io/nominal-client/commit/aeffb44827b46e90101404d4a9a18d036aac49ab))
+* expose truststore.SSLContext across requests usage to permit usage in corporate networks ([55a43c2](https://github.com/nominal-io/nominal-client/commit/55a43c23cb5a32b6cf90b2d50c5f0e79eeafaca7))
+
+
+### Bug Fixes
+
+* fix log message missing argument ([#547](https://github.com/nominal-io/nominal-client/issues/547)) ([c684caa](https://github.com/nominal-io/nominal-client/commit/c684caa559544f7c5c2f13e6d94fd8f2e718827c))
+
+## [1.99.0](https://github.com/nominal-io/nominal-client/compare/v1.98.0...v1.99.0) (2025-12-04)
+
+
+### Features
+
+* allow ingesting .avro files ([#544](https://github.com/nominal-io/nominal-client/issues/544)) ([f5c4561](https://github.com/nominal-io/nominal-client/commit/f5c4561e1db6174a56d6b32b388ed7ad94679fdf))
+
 ## [1.98.0](https://github.com/nominal-io/nominal-client/compare/v1.97.0...v1.98.0) (2025-12-04)
 
 
nominal/core/_utils/multipart.py
CHANGED
@@ -11,6 +11,7 @@ from typing import BinaryIO, Iterable
 import requests
 from nominal_api import ingest_api, upload_api
 
+from nominal.core._utils.networking import create_multipart_request_session
 from nominal.core.exceptions import NominalMultipartUploadFailed
 from nominal.core.filetype import FileType
 
@@ -22,6 +23,7 @@ DEFAULT_NUM_WORKERS = 8
 
 def _sign_and_upload_part_job(
     upload_client: upload_api.UploadService,
+    multipart_session: requests.Session,
     auth_header: str,
     key: str,
     upload_id: str,
@@ -45,8 +47,8 @@ def _sign_and_upload_part_job(
         extra={"response.url": sign_response.url, **log_extras},
     )
 
-    logger.debug("Pushing part %d for multipart upload", extra=log_extras)
-    put_response =
+    logger.debug("Pushing part %d for multipart upload", part, extra=log_extras)
+    put_response = multipart_session.put(
         sign_response.url,
         data=data,
         headers=sign_response.headers,
@@ -141,7 +143,10 @@ def put_multipart_upload(
     )
     initiate_response = upload_client.initiate_multipart_upload(auth_header, initiate_request)
     key, upload_id = initiate_response.key, initiate_response.upload_id
-
+    multipart_session = create_multipart_request_session(pool_size=max_workers)
+    _sign_and_upload_part = partial(
+        _sign_and_upload_part_job, upload_client, multipart_session, auth_header, key, upload_id, q
+    )
 
     jobs: list[concurrent.futures.Future[requests.Response]] = []
 
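The hunks above thread one shared `requests.Session` into every part-upload job by binding it with `functools.partial` before the worker pool runs. A minimal, generic sketch of that pattern (not Nominal-specific; the URL, part numbers, and payloads are placeholders):

```python
import concurrent.futures
from functools import partial

import requests


def upload_part(session: requests.Session, base_url: str, part: int, data: bytes) -> requests.Response:
    # Every worker reuses the same session, so HTTPS connections are pooled across parts.
    return session.put(f"{base_url}?partNumber={part}", data=data)


session = requests.Session()
job = partial(upload_part, session, "https://example.com/upload")  # bind shared state once
with concurrent.futures.ThreadPoolExecutor(max_workers=8) as pool:
    futures = [pool.submit(job, part, b"chunk-bytes") for part in (1, 2, 3)]
    responses = [future.result() for future in futures]
```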
nominal/core/_utils/multipart_downloader.py
CHANGED

@@ -13,11 +13,10 @@ from types import TracebackType
 from typing import Callable, Iterable, Mapping, Sequence, Type
 
 import requests
-from requests.adapters import HTTPAdapter
 from typing_extensions import Self
-from urllib3.util.retry import Retry
 
 from nominal.core._utils.multipart import DEFAULT_CHUNK_SIZE
+from nominal.core._utils.networking import create_multipart_request_session
 
 logger = logging.getLogger(__name__)
 
@@ -129,25 +128,12 @@ class MultipartFileDownloader:
             max_workers = multiprocessing.cpu_count()
             logger.info("Inferring core count as %d", max_workers)
 
-        session =
+        session = create_multipart_request_session(pool_size=max_workers)
        pool = ThreadPoolExecutor(max_workers=max_workers)
         return cls(max_workers, timeout, max_part_retries, _session=session, _pool=pool, _closed=False)
 
     # ---- lifecycle ----
 
-    @staticmethod
-    def _make_session(pool_size: int) -> requests.Session:
-        retries = Retry(
-            total=5,
-            backoff_factor=0.5,
-            status_forcelist=(429, 500, 502, 503, 504),
-            allowed_methods=frozenset(["GET", "HEAD"]),
-        )
-        s = requests.Session()
-        adapter = HTTPAdapter(max_retries=retries, pool_maxsize=pool_size)
-        s.mount("https://", adapter)
-        return s
-
     def close(self) -> None:
         if not self._closed:
             try:
nominal/core/_utils/networking.py
CHANGED

@@ -1,20 +1,69 @@
 from __future__ import annotations
 
 import gzip
+import logging
 import os
+import ssl
 from typing import Any, Callable, Mapping, Type, TypeVar
 
 import requests
+import truststore
 from conjure_python_client import ServiceConfiguration
-from conjure_python_client._http.requests_client import
-from requests.adapters import CaseInsensitiveDict
+from conjure_python_client._http.requests_client import KEEP_ALIVE_SOCKET_OPTIONS, RetryWithJitter
+from requests.adapters import DEFAULT_POOLSIZE, CaseInsensitiveDict, HTTPAdapter
+from urllib3.connection import HTTPConnection
+from urllib3.util.retry import Retry
+
+logger = logging.getLogger(__name__)
 
 T = TypeVar("T")
 
 GZIP_COMPRESSION_LEVEL = 1
 
 
-class
+class SslBypassRequestsAdapter(HTTPAdapter):
+    """Transport adapter that allows customizing SSL options and forwarding host truststore.
+
+    NOTE: based on a combination of injecting `truststore.SSLContext` into
+    `conjure_python_client._http.requests_client.TransportAdapter`.
+    """
+
+    ENABLE_KEEP_ALIVE_ATTR = "_enable_keep_alive"
+    __attrs__ = [*HTTPAdapter.__attrs__, ENABLE_KEEP_ALIVE_ATTR]
+
+    def __init__(self, *args: Any, enable_keep_alive: bool = False, **kwargs: Any):
+        self._enable_keep_alive = enable_keep_alive
+        super().__init__(*args, **kwargs)
+
+    def init_poolmanager(
+        self,
+        connections: int,
+        maxsize: int,
+        block: bool = False,
+        **pool_kwargs: Mapping[str, Any],
+    ) -> None:
+        """Wrapper around the standard init_poolmanager from HTTPAdapter with modifications
+        to support keep-alive settings and injecting SSL context.
+        """
+        if self._enable_keep_alive:
+            keep_alive_kwargs: dict[str, Any] = {
+                "socket_options": [
+                    *HTTPConnection.default_socket_options,
+                    *KEEP_ALIVE_SOCKET_OPTIONS,
+                ]
+            }
+            pool_kwargs = {**pool_kwargs, **keep_alive_kwargs}
+
+        pool_kwargs["ssl_context"] = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+
+        super().init_poolmanager(connections, maxsize, block, **pool_kwargs)  # type: ignore[no-untyped-call]
+
+    def __setstate__(self, state: dict[str, Any]) -> None:
+        state[self.ENABLE_KEEP_ALIVE_ATTR] = state.get(self.ENABLE_KEEP_ALIVE_ATTR, False)
+        super().__setstate__(state)  # type: ignore[misc]
+
+
+class NominalRequestsAdapter(SslBypassRequestsAdapter):
     """Adapter used with `requests` library for sending gzip-compressed data.
 
     Based on: https://github.com/psf/requests/issues/1753#issuecomment-417806737
@@ -69,7 +118,7 @@ class GzipRequestsAdapter(TransportAdapter):
         return super().send(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
 
 
-def
+def create_conjure_service_client(
     service_class: Type[T],
     user_agent: str,
     service_config: ServiceConfiguration,
@@ -104,7 +153,7 @@ def create_gzip_service_client(
         status_forcelist=[308, 429, 503],
         backoff_factor=float(service_config.backoff_slot_size) / 1000,
     )
-    transport_adapter =
+    transport_adapter = NominalRequestsAdapter(max_retries=retry)
     # create a session, for shared connection polling, user agent, etc
     session = requests.Session()
     session.headers = CaseInsensitiveDict({"User-Agent": user_agent})
@@ -131,11 +180,11 @@ def create_conjure_client_factory(
 ) -> Callable[[Type[T]], T]:
     """Create factory method for creating conjure clients given the respective conjure service type
 
-    See `
+    See `create_conjure_service_client` for documentation on parameters.
     """
 
     def factory(service_class: Type[T]) -> T:
-        return
+        return create_conjure_service_client(
             service_class,
             user_agent=user_agent,
             service_config=service_config,
@@ -143,3 +192,19 @@ def create_conjure_client_factory(
         )
 
     return factory
+
+
+def create_multipart_request_session(
+    *,
+    pool_size: int = DEFAULT_POOLSIZE,
+    num_retries: int = 5,
+) -> requests.Session:
+    retries = Retry(
+        total=num_retries,
+        backoff_factor=0.5,
+        status_forcelist=(429, 500, 502, 503, 504),
+    )
+    session = requests.Session()
+    adapter = SslBypassRequestsAdapter(max_retries=retries, pool_maxsize=pool_size)
+    session.mount("https://", adapter)
+    return session
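A short usage sketch of the new helper (assuming nominal 1.100.0 is installed; the URL is a placeholder): the returned session mounts `SslBypassRequestsAdapter` on `https://`, so TLS verification goes through the host trust store via `truststore.SSLContext` and 429/5xx responses are retried with backoff.

```python
from nominal.core._utils.networking import create_multipart_request_session

# Size the connection pool to match the worker pool that will share this session.
session = create_multipart_request_session(pool_size=8, num_retries=5)

response = session.get("https://example.com/")  # placeholder URL
response.raise_for_status()
```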
nominal/core/asset.py
CHANGED
@@ -1,10 +1,12 @@
 from __future__ import annotations
 
+import logging
 from dataclasses import dataclass, field
 from types import MappingProxyType
 from typing import Iterable, Literal, Mapping, Protocol, Sequence, TypeAlias, cast
 
 from nominal_api import (
+    scout,
     scout_asset_api,
     scout_assets,
     scout_run_api,
@@ -13,6 +15,7 @@ from typing_extensions import Self
 
 from nominal.core._clientsbunch import HasScoutParams
 from nominal.core._utils.api_tools import HasRid, Link, RefreshableMixin, create_links, rid_from_instance_or_string
+from nominal.core._utils.pagination_tools import search_runs_by_asset_paginated
 from nominal.core.attachment import Attachment, _iter_get_attachments
 from nominal.core.connection import Connection, _get_connections
 from nominal.core.dataset import Dataset, _create_dataset, _get_datasets
@@ -22,6 +25,8 @@ from nominal.ts import IntegralNanosecondsUTC, _SecondsNanos
 
 ScopeType: TypeAlias = Connection | Dataset | Video
 
+logger = logging.getLogger(__name__)
+
 
 @dataclass(frozen=True)
 class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
@@ -43,6 +48,8 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
     ):
         @property
         def assets(self) -> scout_assets.AssetService: ...
+        @property
+        def run(self) -> scout.RunService: ...
 
     @property
     def nominal_url(self) -> str:
@@ -98,6 +105,22 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
             if scope.data_source.type.lower() == stype
         }
 
+    def promote(self) -> Self:
+        """Promote this asset to be a standard, searchable, and displayable asset.
+
+        This method is only useful for assets that were created implicitly from creating a run directly on a dataset.
+        Nothing will happen from calling this method (aside from a logged warning) if called on a non-staged
+        asset (e.g. an asset created by create_asset, or an asset that's already been promoted).
+        """
+        if self._get_latest_api().is_staged:
+            request = scout_asset_api.UpdateAssetRequest(is_staged=False)
+            updated_asset = self._clients.assets.update_asset(self._clients.auth_header, request, self.rid)
+            self._refresh_from_api(updated_asset)
+        else:
+            logger.warning("Not promoting asset %s-- already promoted!", self.rid)
+
+        return self
+
     def get_data_scope(self, data_scope_name: str) -> ScopeType:
         """Retrieve a datascope by data scope name, or raise ValueError if one is not found."""
         for scope, data in self.list_data_scopes():
@@ -257,6 +280,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
         description: str | None = None,
         labels: Sequence[str] = (),
         properties: Mapping[str, str] | None = None,
+        prefix_tree_delimiter: str | None = None,
     ) -> Dataset:
         """Retrieve a dataset by data scope name, or create a new one if it does not exist."""
         try:
@@ -272,6 +296,10 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
                 workspace_rid=self._clients.workspace_rid,
             )
             dataset = Dataset._from_conjure(self._clients, enriched_dataset)
+
+            if prefix_tree_delimiter is not None:
+                dataset.set_channel_prefix_tree(prefix_tree_delimiter)
+
             self.add_dataset(data_scope_name, dataset)
             return dataset
 
@@ -372,6 +400,17 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
     def list_attachments(self) -> Sequence[Attachment]:
         return list(self._iter_list_attachments())
 
+    def list_runs(self) -> Sequence[Run]:
+        """List all runs associated with this Asset."""
+        return [
+            Run._from_conjure(self._clients, run)
+            for run in search_runs_by_asset_paginated(
+                self._clients.run,
+                self._clients.auth_header,
+                self.rid,
+            )
+        ]
+
     def remove_attachments(self, attachments: Iterable[Attachment] | Iterable[str]) -> None:
         """Remove attachments from this asset.
         Does not remove the attachments from Nominal.
@@ -403,3 +442,7 @@ class Asset(HasRid, RefreshableMixin[scout_asset_api.Asset]):
             created_at=_SecondsNanos.from_flexible(asset.created_at).to_nanoseconds(),
             _clients=clients,
         )
+
+
+# Moving to bottom to deal with circular dependencies
+from nominal.core.run import Run  # noqa: E402
nominal/core/client.py
CHANGED
@@ -608,6 +608,10 @@ class NominalClient:
             )
         )
 
+    @deprecated(
+        "NominalClient.search_runs_by_asset is deprecated and will be removed in a future version. "
+        "Use Asset.list_runs() instead."
+    )
     def search_runs_by_asset(self, asset: Asset | str) -> Sequence[Run]:
         """Search for all runs associated with a given asset:
 
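A migration sketch for the deprecation above (hypothetical helper; the client and asset are assumed to already exist):

```python
from typing import Sequence

from nominal.core import NominalClient
from nominal.core.asset import Asset
from nominal.core.run import Run


def runs_for_asset(client: NominalClient, asset: Asset) -> Sequence[Run]:
    # Deprecated as of 1.100.0 (still functional, now emits a DeprecationWarning):
    #     return client.search_runs_by_asset(asset)
    # Preferred replacement:
    return asset.list_runs()
```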
nominal/core/containerized_extractors.py
CHANGED

@@ -5,7 +5,7 @@ from enum import Enum
 from typing import Mapping, Protocol, Sequence
 
 from nominal_api import ingest_api
-from typing_extensions import Self
+from typing_extensions import Self, deprecated
 
 from nominal._utils.dataclass_tools import update_dataclass
 from nominal.core._clientsbunch import HasScoutParams
@@ -203,9 +203,22 @@ class ContainerizedExtractor(HasRid):
     inputs: Sequence[FileExtractionInput]
     properties: Mapping[str, str]
     labels: Sequence[str]
-
+    default_timestamp_metadata: TimestampMetadata | None
     _clients: _Clients = field(repr=False)
 
+    @property
+    @deprecated(
+        "The `timestamp_metadata` field of a ContainerizedExtractor is deprecated and will be removed in a future "
+        "release. Use the `default_timestamp_metadata` field instead."
+    )
+    def timestamp_metadata(self) -> TimestampMetadata:
+        if self.default_timestamp_metadata is None:
+            raise ValueError(
+                f"Containerized extractor {self.name} ({self.rid}) has no default configured timestamp metadata"
+            )
+
+        return self.default_timestamp_metadata
+
     class _Clients(HasScoutParams, Protocol):
         @property
         def containerized_extractors(self) -> ingest_api.ContainerizedExtractorService: ...
@@ -251,6 +264,12 @@ class ContainerizedExtractor(HasRid):
 
     @classmethod
     def _from_conjure(cls, clients: _Clients, raw_extractor: ingest_api.ContainerizedExtractor) -> Self:
+        timestamp_metadata = (
+            None
+            if raw_extractor.timestamp_metadata is None
+            else TimestampMetadata._from_conjure(raw_extractor.timestamp_metadata)
+        )
+
         return cls(
             rid=raw_extractor.rid,
             name=raw_extractor.name,
@@ -259,6 +278,6 @@ class ContainerizedExtractor(HasRid):
             inputs=[FileExtractionInput._from_conjure(raw_input) for raw_input in raw_extractor.inputs],
             properties=raw_extractor.properties,
             labels=raw_extractor.labels,
-
+            default_timestamp_metadata=timestamp_metadata,
             _clients=clients,
         )
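A small sketch of reading the renamed field without tripping the deprecation (hypothetical helper; `extractor` is assumed to be an existing `ContainerizedExtractor`):

```python
from nominal.core.containerized_extractors import ContainerizedExtractor


def has_default_timestamps(extractor: ContainerizedExtractor) -> bool:
    # Prefer the new optional field; the old `timestamp_metadata` property is deprecated and
    # now raises ValueError when the extractor has no default timestamp configuration.
    return extractor.default_timestamp_metadata is not None
```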
nominal/core/dataset.py
CHANGED
@@ -6,7 +6,7 @@ from datetime import timedelta
 from io import TextIOBase
 from pathlib import Path
 from types import MappingProxyType
-from typing import BinaryIO, Iterable, Mapping, Sequence, TypeAlias
+from typing import BinaryIO, Iterable, Mapping, Sequence, TypeAlias, overload
 
 from nominal_api import api, ingest_api, scout_catalog
 from typing_extensions import Self, deprecated
@@ -203,6 +203,78 @@ class Dataset(DataSource, RefreshableMixin[scout_catalog.EnrichedDataset]):
     # Backward compatibility
     add_to_dataset_from_io = add_from_io
 
+    def add_avro_stream(
+        self,
+        path: Path | str,
+    ) -> DatasetFile:
+        """Upload an avro stream file with a specific schema, described below.
+
+        This is a "stream-like" file format to support
+        use cases where a columnar/tabular format does not make sense. This closely matches Nominal's streaming
+        API, making it useful for use cases where network connection drops during streaming and a backup file needs
+        to be created.
+
+        If this schema is not used, will result in a failed ingestion.
+        {
+            "type": "record",
+            "name": "AvroStream",
+            "namespace": "io.nominal.ingest",
+            "fields": [
+                {
+                    "name": "channel",
+                    "type": "string",
+                    "doc": "Channel/series name (e.g., 'vehicle_id', 'col_1', 'temperature')",
+                },
+                {
+                    "name": "timestamps",
+                    "type": {"type": "array", "items": "long"},
+                    "doc": "Array of Unix timestamps in nanoseconds",
+                },
+                {
+                    "name": "values",
+                    "type": {"type": "array", "items": ["double", "string"]},
+                    "doc": "Array of values. Can either be doubles or strings",
+                },
+                {
+                    "name": "tags",
+                    "type": {"type": "map", "values": "string"},
+                    "default": {},
+                    "doc": "Key-value metadata tags",
+                },
+            ],
+        }
+
+        Args:
+            path: Path to the .avro file to upload
+
+        Returns:
+            Reference to the ingesting DatasetFile
+
+        """
+        avro_path = Path(path)
+        s3_path = upload_multipart_file(
+            self._clients.auth_header,
+            self._clients.workspace_rid,
+            avro_path,
+            self._clients.upload,
+            file_type=FileTypes.AVRO_STREAM,
+        )
+        target = ingest_api.DatasetIngestTarget(
+            existing=ingest_api.ExistingDatasetIngestDestination(dataset_rid=self.rid)
+        )
+        resp = self._clients.ingest.ingest(
+            self._clients.auth_header,
+            ingest_api.IngestRequest(
+                options=ingest_api.IngestOptions(
+                    avro_stream=ingest_api.AvroStreamOpts(
+                        source=ingest_api.IngestSource(s3=ingest_api.S3IngestSource(s3_path)),
+                        target=target,
+                    )
+                )
+            ),
+        )
+        return self._handle_ingest_response(resp)
+
     def add_journal_json(
         self,
         path: Path | str,
@@ -338,11 +410,38 @@
     # Backward compatibility
     add_ardupilot_dataflash_to_dataset = add_ardupilot_dataflash
 
+    @overload
     def add_containerized(
         self,
         extractor: str | ContainerizedExtractor,
         sources: Mapping[str, Path | str],
         tag: str | None = None,
+        *,
+        arguments: Mapping[str, str] | None = None,
+        tags: Mapping[str, str] | None = None,
+    ) -> DatasetFile: ...
+    @overload
+    def add_containerized(
+        self,
+        extractor: str | ContainerizedExtractor,
+        sources: Mapping[str, Path | str],
+        tag: str | None = None,
+        *,
+        arguments: Mapping[str, str] | None = None,
+        tags: Mapping[str, str] | None = None,
+        timestamp_column: str,
+        timestamp_type: _AnyTimestampType,
+    ) -> DatasetFile: ...
+    def add_containerized(
+        self,
+        extractor: str | ContainerizedExtractor,
+        sources: Mapping[str, Path | str],
+        tag: str | None = None,
+        *,
+        arguments: Mapping[str, str] | None = None,
+        tags: Mapping[str, str] | None = None,
+        timestamp_column: str | None = None,
+        timestamp_type: _AnyTimestampType | None = None,
     ) -> DatasetFile:
         """Add data from proprietary data formats using a pre-registered custom extractor.
 
@@ -352,7 +451,24 @@
                 NOTE: these must match the registered inputs of the containerized extractor exactly
             tag: Tag of the Docker container which hosts the extractor.
                 NOTE: if not provided, the default registered docker tag will be used.
+            arguments: Mapping of key-value pairs of input arguments to the extractor.
+            tags: Key-value pairs of tags to apply to all data ingested from the containerized extractor run.
+            timestamp_column: the column in the dataset that contains the timestamp data.
+                NOTE: this is applied uniformly to all output files
+                NOTE: must be provided with a `timestamp_type` or a ValueError will be raised
+            timestamp_type: the type of timestamp data in the dataset.
+                NOTE: this is applied uniformly to all output files
+                NOTE: must be provided with a `timestamp_column` or a ValueError will be raised
         """
+        timestamp_metadata = None
+        if timestamp_column is not None and timestamp_type is not None:
+            timestamp_metadata = ingest_api.TimestampMetadata(
+                series_name=timestamp_column,
+                timestamp_type=_to_typed_timestamp_type(timestamp_type)._to_conjure_ingest_api(),
+            )
+        elif None in (timestamp_column, timestamp_type):
+            raise ValueError("Only one of `timestamp_column` and `timestamp_type` provided!")
+
         if isinstance(extractor, str):
             extractor = ContainerizedExtractor._from_conjure(
                 self._clients,
@@ -379,13 +495,14 @@
             )
             logger.info("Uploaded %s -> %s", source_path, s3_path)
             s3_inputs[source] = s3_path
+
         logger.info("Triggering custom extractor %s (tag=%s) with %s", extractor.name, tag, s3_inputs)
         resp = self._clients.ingest.ingest(
             self._clients.auth_header,
             trigger_ingest=ingest_api.IngestRequest(
                 options=ingest_api.IngestOptions(
                     containerized=ingest_api.ContainerizedOpts(
-                        arguments={},
+                        arguments={**(arguments or {})},
                         extractor_rid=extractor.rid,
                         sources={
                             source: ingest_api.IngestSource(s3=ingest_api.S3IngestSource(path=s3_path))
@@ -395,6 +512,8 @@
                             existing=ingest_api.ExistingDatasetIngestDestination(self.rid)
                         ),
                         tag=tag,
+                        additional_file_tags={**(tags or {})},
+                        timestamp_metadata=timestamp_metadata,
                     )
                 )
             ),
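A sketch of the two new ingest paths (assumptions: `fastavro` as the Avro writer and the `"epoch_seconds"` timestamp-type literal; all paths, channel names, extractor identifiers, and tag values are placeholders, and `dataset` is an existing `nominal.core.dataset.Dataset`):

```python
import fastavro

from nominal.core.dataset import Dataset

# Mirrors the schema documented in add_avro_stream (doc strings omitted for brevity).
AVRO_STREAM_SCHEMA = {
    "type": "record",
    "name": "AvroStream",
    "namespace": "io.nominal.ingest",
    "fields": [
        {"name": "channel", "type": "string"},
        {"name": "timestamps", "type": {"type": "array", "items": "long"}},
        {"name": "values", "type": {"type": "array", "items": ["double", "string"]}},
        {"name": "tags", "type": {"type": "map", "values": "string"}, "default": {}},
    ],
}


def backfill(dataset: Dataset) -> None:
    records = [
        {
            "channel": "temperature",
            "timestamps": [1_700_000_000_000_000_000, 1_700_000_001_000_000_000],
            "values": [21.5, 21.7],
            "tags": {"vehicle_id": "veh-001"},
        }
    ]
    # Write a conforming AvroStream backup file, then ingest it into the existing dataset.
    with open("backup.avro", "wb") as f:
        fastavro.writer(f, fastavro.parse_schema(AVRO_STREAM_SCHEMA), records)
    dataset.add_avro_stream("backup.avro")

    # Containerized ingest can now pass extractor arguments, extra file tags, and a uniform
    # timestamp column/type (both must be given together or a ValueError is raised).
    dataset.add_containerized(
        "my-extractor",  # registered extractor name or rid (placeholder)
        {"input_file": "proprietary.bin"},  # keys must match the extractor's registered inputs
        arguments={"mode": "full"},
        tags={"campaign": "winter-test"},
        timestamp_column="time",
        timestamp_type="epoch_seconds",
    )
```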
nominal/core/filetype.py
CHANGED
@@ -111,6 +111,7 @@ class FileType(NamedTuple):
 
 
 class FileTypes:
+    AVRO_STREAM: FileType = FileType(".avro", "application/avro")
     BINARY: FileType = FileType("", "application/octet-stream")
     CSV: FileType = FileType(".csv", "text/csv")
     CSV_GZ: FileType = FileType(".csv.gz", "text/csv")
nominal/core/run.py
CHANGED
@@ -6,11 +6,11 @@ from types import MappingProxyType
 from typing import Iterable, Mapping, Protocol, Sequence, cast
 
 from nominal_api import (
-    scout,
     scout_run_api,
 )
 from typing_extensions import Self
 
+from nominal.core import asset as core_asset
 from nominal.core._clientsbunch import HasScoutParams
 from nominal.core._utils.api_tools import (
     HasRid,
@@ -20,7 +20,6 @@ from nominal.core._utils.api_tools import (
     create_links,
     rid_from_instance_or_string,
 )
-from nominal.core.asset import Asset
 from nominal.core.attachment import Attachment, _iter_get_attachments
 from nominal.core.connection import Connection, _get_connections
 from nominal.core.dataset import Dataset, _get_datasets
@@ -45,12 +44,11 @@ class Run(HasRid, RefreshableMixin[scout_run_api.Run]):
     _clients: _Clients = field(repr=False)
 
     class _Clients(
-        Asset._Clients,
+        core_asset.Asset._Clients,
         HasScoutParams,
         Protocol,
     ):
-
-        def run(self) -> scout.RunService: ...
+        pass
 
     @property
     def nominal_url(self) -> str:
@@ -304,13 +302,13 @@
         """List a sequence of Attachments associated with this Run."""
         return list(self._iter_list_attachments())
 
-    def _iter_list_assets(self) -> Iterable[Asset]:
+    def _iter_list_assets(self) -> Iterable[core_asset.Asset]:
         run = self._get_latest_api()
         assets = self._clients.assets.get_assets(self._clients.auth_header, run.assets)
         for a in assets.values():
-            yield Asset._from_conjure(self._clients, a)
+            yield core_asset.Asset._from_conjure(self._clients, a)
 
-    def list_assets(self) -> Sequence[Asset]:
+    def list_assets(self) -> Sequence[core_asset.Asset]:
         """List assets associated with this run."""
         return list(self._iter_list_assets())
 
nominal/experimental/migration/__init__.py
ADDED

from nominal.experimental.migration.migration_utils import (
    clone_asset,
    clone_dataset,
    clone_workbook_template,
    copy_asset_from,
    copy_dataset_from,
    copy_resources_to_destination_client,
    copy_workbook_template_from,
)

__all__ = [
    "clone_asset",
    "clone_dataset",
    "clone_workbook_template",
    "copy_asset_from",
    "copy_dataset_from",
    "copy_resources_to_destination_client",
    "copy_workbook_template_from",
]
@@ -0,0 +1,552 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import re
|
|
4
|
+
import uuid
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, BinaryIO, Mapping, Sequence, TypeVar, Union, cast, overload
|
|
7
|
+
|
|
8
|
+
import requests
|
|
9
|
+
from conjure_python_client import ConjureBeanType, ConjureEnumType, ConjureUnionType
|
|
10
|
+
from conjure_python_client._serde.decoder import ConjureDecoder
|
|
11
|
+
from conjure_python_client._serde.encoder import ConjureEncoder
|
|
12
|
+
from nominal_api import scout_layout_api, scout_template_api, scout_workbookcommon_api
|
|
13
|
+
|
|
14
|
+
from nominal.core import Asset, Dataset, DatasetFile, FileType, NominalClient, Workbook, WorkbookTemplate
|
|
15
|
+
|
|
16
|
+
logger = logging.getLogger(__name__)
|
|
17
|
+
|
|
18
|
+
ConjureType = Union[ConjureBeanType, ConjureUnionType, ConjureEnumType]
|
|
19
|
+
|
|
20
|
+
# Regex pattern to match strings that have a UUID format with a prefix.
|
|
21
|
+
UUID_PATTERN = re.compile(r"^(.*)([0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12})$")
|
|
22
|
+
|
|
23
|
+
# Keeping tight control over ids we consider to be UUIDs.
|
|
24
|
+
UUID_KEYS = ("id", "rid", "functionUuid", "plotId", "yAxisId", "chartRid")
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _convert_if_json(s: str) -> tuple[Any, bool]:
|
|
28
|
+
"""If the string is a valid JSON, convert it. Otherwise, return original object.
|
|
29
|
+
|
|
30
|
+
Args:
|
|
31
|
+
s: The string to to convert or return as is.
|
|
32
|
+
|
|
33
|
+
Returns:
|
|
34
|
+
The parsed JSON object if s is valid JSON, else the original string. Also returns the action taken in a boolean.
|
|
35
|
+
"""
|
|
36
|
+
try:
|
|
37
|
+
return (json.loads(s), True)
|
|
38
|
+
except (ValueError, TypeError):
|
|
39
|
+
return (s, False)
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _check_and_add_uuid_to_mapping(input_str: str, mapping: dict[str, str]) -> None:
|
|
43
|
+
"""Check if a string matches the UUID pattern and add to mapping if not already present.
|
|
44
|
+
|
|
45
|
+
Args:
|
|
46
|
+
input_str: The string to check for UUID pattern.
|
|
47
|
+
mapping: The mapping dictionary to add to if a new UUID is found.
|
|
48
|
+
"""
|
|
49
|
+
match = UUID_PATTERN.search(input_str)
|
|
50
|
+
if match and input_str not in mapping:
|
|
51
|
+
mapping[input_str] = f"{match.group(1)}{str(uuid.uuid4())}"
|
|
52
|
+
logger.debug("Found UUID and added to mapping: %s -> %s", input_str, mapping[input_str])
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _extract_uuids_from_obj(obj: Any, mapping: dict[str, str]) -> None:
|
|
56
|
+
"""Recursively extract UUIDs from a nested JSON object, and populate the mapping.
|
|
57
|
+
|
|
58
|
+
Searches for UUIDs in:
|
|
59
|
+
- Values of specific keys (defined in UUID_KEYS)
|
|
60
|
+
- Dictionary keys that match the UUID pattern
|
|
61
|
+
- Nested JSON strings that are parsed and searched recursively
|
|
62
|
+
|
|
63
|
+
Args:
|
|
64
|
+
obj: The object to search (dict, list, or primitive).
|
|
65
|
+
mapping: Dictionary to populate with found UUIDs as keys.
|
|
66
|
+
"""
|
|
67
|
+
# TODO (Sean): Refactor to remove expensive recursion strategy.
|
|
68
|
+
if isinstance(obj, dict):
|
|
69
|
+
for key, value in obj.items():
|
|
70
|
+
if key in UUID_KEYS and isinstance(value, str):
|
|
71
|
+
_check_and_add_uuid_to_mapping(value, mapping)
|
|
72
|
+
else:
|
|
73
|
+
_check_and_add_uuid_to_mapping(key, mapping)
|
|
74
|
+
# Some values may be JSON strings that need to be parsed.
|
|
75
|
+
_extract_uuids_from_obj(_convert_if_json(value)[0] if isinstance(value, str) else value, mapping)
|
|
76
|
+
elif isinstance(obj, list):
|
|
77
|
+
for item in obj:
|
|
78
|
+
_extract_uuids_from_obj(item, mapping)
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def _generate_uuid_mapping(objs: list[Any]) -> dict[str, str]:
|
|
82
|
+
"""Search for all UUIDs in a list of objects and generate a mapping to new UUIDs.
|
|
83
|
+
|
|
84
|
+
Args:
|
|
85
|
+
objs: List of objects to search for UUIDs.
|
|
86
|
+
|
|
87
|
+
Returns:
|
|
88
|
+
A mapping of all UUIDs found in the objects with their new generated UUIDs.
|
|
89
|
+
"""
|
|
90
|
+
mapping: dict[str, str] = {}
|
|
91
|
+
for obj in objs:
|
|
92
|
+
_extract_uuids_from_obj(obj, mapping)
|
|
93
|
+
return mapping
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def create_workbook_template_with_content_and_layout(
|
|
97
|
+
client: NominalClient,
|
|
98
|
+
title: str,
|
|
99
|
+
layout: scout_layout_api.WorkbookLayout,
|
|
100
|
+
content: scout_workbookcommon_api.WorkbookContent,
|
|
101
|
+
workspace_rid: str,
|
|
102
|
+
*,
|
|
103
|
+
description: str | None = None,
|
|
104
|
+
labels: Sequence[str] | None = None,
|
|
105
|
+
properties: Mapping[str, str] | None = None,
|
|
106
|
+
commit_message: str | None = None,
|
|
107
|
+
) -> WorkbookTemplate:
|
|
108
|
+
"""Create a workbook template with specified content and layout.
|
|
109
|
+
|
|
110
|
+
This is a helper method that constructs and creates a workbook template
|
|
111
|
+
request with the provided parameters, including layout and content. Method is considered experimental and may
|
|
112
|
+
change in future releases. The template is created in the target workspace and is not discoverable by default.
|
|
113
|
+
|
|
114
|
+
Args:
|
|
115
|
+
client: The NominalClient to use for creating the template.
|
|
116
|
+
title: The title of the template.
|
|
117
|
+
layout: The workbook layout to use.
|
|
118
|
+
content: The workbook content to use.
|
|
119
|
+
workspace_rid: The resource ID of the workspace to create the template in.
|
|
120
|
+
description: The description of the template.
|
|
121
|
+
labels: List of labels to apply to the template.
|
|
122
|
+
properties: Dictionary of properties for the template.
|
|
123
|
+
commit_message: The commit message for the template creation.
|
|
124
|
+
|
|
125
|
+
Returns:
|
|
126
|
+
The newly created WorkbookTemplate.
|
|
127
|
+
"""
|
|
128
|
+
request = scout_template_api.CreateTemplateRequest(
|
|
129
|
+
title=title,
|
|
130
|
+
description=description if description is not None else "",
|
|
131
|
+
labels=list(labels) if labels is not None else [],
|
|
132
|
+
properties=dict(properties) if properties is not None else {},
|
|
133
|
+
is_published=False,
|
|
134
|
+
layout=layout,
|
|
135
|
+
content=content,
|
|
136
|
+
message=commit_message if commit_message is not None else "",
|
|
137
|
+
workspace=client._workspace_rid_for_search(workspace_rid),
|
|
138
|
+
)
|
|
139
|
+
|
|
140
|
+
template = client._clients.template.create(client._clients.auth_header, request)
|
|
141
|
+
return WorkbookTemplate._from_conjure(client._clients, template)
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _replace_uuids_in_obj(obj: Any, mapping: dict[str, str]) -> Any:
|
|
145
|
+
"""Recursively replace UUIDs in a nested JSON object.
|
|
146
|
+
|
|
147
|
+
Replaces UUIDs found in:
|
|
148
|
+
- Dictionary keys that match UUID pattern and are in the mapping
|
|
149
|
+
- String values that are in the mapping
|
|
150
|
+
- Nested JSON strings that are parsed, processed, and re-serialized
|
|
151
|
+
|
|
152
|
+
Args:
|
|
153
|
+
obj: The object to process (dict, list, or primitive).
|
|
154
|
+
mapping: Dictionary mapping old UUIDs to new UUIDs.
|
|
155
|
+
|
|
156
|
+
Returns:
|
|
157
|
+
A new object with all UUIDs replaced according to the mapping.
|
|
158
|
+
Primitive values are returned unchanged if they don't match any UUIDs.
|
|
159
|
+
"""
|
|
160
|
+
if isinstance(obj, dict):
|
|
161
|
+
new_obj = {}
|
|
162
|
+
for key, value in obj.items():
|
|
163
|
+
if isinstance(key, str) and re.search(UUID_PATTERN, key) and key in mapping:
|
|
164
|
+
new_key = mapping[key]
|
|
165
|
+
new_obj[new_key] = _replace_uuids_in_obj(value, mapping)
|
|
166
|
+
elif isinstance(value, str) and value in mapping:
|
|
167
|
+
new_obj[key] = mapping[value]
|
|
168
|
+
elif isinstance(value, str):
|
|
169
|
+
parsed_value, was_json = _convert_if_json(value)
|
|
170
|
+
if was_json:
|
|
171
|
+
new_obj[key] = json.dumps(_replace_uuids_in_obj(parsed_value, mapping), separators=(",", ":"))
|
|
172
|
+
else:
|
|
173
|
+
new_obj[key] = _replace_uuids_in_obj(value, mapping)
|
|
174
|
+
else:
|
|
175
|
+
new_obj[key] = _replace_uuids_in_obj(value, mapping)
|
|
176
|
+
return new_obj
|
|
177
|
+
elif isinstance(obj, list):
|
|
178
|
+
return [_replace_uuids_in_obj(item, mapping) for item in obj]
|
|
179
|
+
else:
|
|
180
|
+
return obj
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
T1 = TypeVar("T1", bound=ConjureType)
|
|
184
|
+
T2 = TypeVar("T2", bound=ConjureType)
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
@overload
|
|
188
|
+
def _clone_conjure_objects_with_new_uuids(
|
|
189
|
+
objs: tuple[T1, T2],
|
|
190
|
+
) -> tuple[T1, T2]: ...
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
@overload
|
|
194
|
+
def _clone_conjure_objects_with_new_uuids(objs: list[ConjureType]) -> list[ConjureType]: ...
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _clone_conjure_objects_with_new_uuids(
|
|
198
|
+
objs: tuple[ConjureType, ...] | list[ConjureType],
|
|
199
|
+
) -> tuple[ConjureType, ...] | list[ConjureType]:
|
|
200
|
+
"""Clone Conjure objects by replacing all UUIDs with new ones.
|
|
201
|
+
|
|
202
|
+
This function:
|
|
203
|
+
1. Converts Conjure objects to JSON
|
|
204
|
+
2. Finds all UUIDs in the JSON structures, according to defined keys
|
|
205
|
+
3. Generates new UUIDs for each old UUID, maintaining prefixes
|
|
206
|
+
4. Replaces all UUIDs in the objects
|
|
207
|
+
5. Returns the cloned objects with new UUIDs
|
|
208
|
+
|
|
209
|
+
Args:
|
|
210
|
+
objs: List of Conjure objects to clone.
|
|
211
|
+
|
|
212
|
+
Returns:
|
|
213
|
+
List of cloned Conjure objects with new UUIDs. The structure and content
|
|
214
|
+
are identical to the originals, but all UUIDs have been replaced.
|
|
215
|
+
"""
|
|
216
|
+
original_types = [type(obj) for obj in objs]
|
|
217
|
+
|
|
218
|
+
json_objs = [ConjureEncoder.do_encode(obj) for obj in objs]
|
|
219
|
+
|
|
220
|
+
mapping = _generate_uuid_mapping(json_objs)
|
|
221
|
+
|
|
222
|
+
new_json_objs = [_replace_uuids_in_obj(json_obj, mapping) for json_obj in json_objs]
|
|
223
|
+
|
|
224
|
+
# Deserialize each dict back to its original type
|
|
225
|
+
decoder = ConjureDecoder()
|
|
226
|
+
result = [
|
|
227
|
+
decoder.do_decode(new_json_obj, obj_type) for new_json_obj, obj_type in zip(new_json_objs, original_types)
|
|
228
|
+
]
|
|
229
|
+
|
|
230
|
+
return tuple(result) if isinstance(objs, tuple) else result
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
# TODO (Sean): Once we move this out of experimental, make clone/copy_resource_from abstract methods in the HasRid class
|
|
234
|
+
def clone_workbook_template(
|
|
235
|
+
source_template: WorkbookTemplate,
|
|
236
|
+
destination_client: NominalClient,
|
|
237
|
+
) -> WorkbookTemplate:
|
|
238
|
+
"""Clones a workbook template, maintaining all properties and content.
|
|
239
|
+
|
|
240
|
+
Args:
|
|
241
|
+
source_template (WorkbookTemplate): The template to copy
|
|
242
|
+
destination_client (NominalClient): The client to copy to
|
|
243
|
+
Returns:
|
|
244
|
+
The cloned template.
|
|
245
|
+
"""
|
|
246
|
+
return copy_workbook_template_from(
|
|
247
|
+
source_template=source_template,
|
|
248
|
+
destination_client=destination_client,
|
|
249
|
+
include_content_and_layout=True,
|
|
250
|
+
)
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def copy_workbook_template_from(
|
|
254
|
+
source_template: WorkbookTemplate,
|
|
255
|
+
destination_client: NominalClient,
|
|
256
|
+
*,
|
|
257
|
+
new_template_title: str | None = None,
|
|
258
|
+
new_template_description: str | None = None,
|
|
259
|
+
new_template_labels: Sequence[str] | None = None,
|
|
260
|
+
new_template_properties: Mapping[str, str] | None = None,
|
|
261
|
+
include_content_and_layout: bool = False,
|
|
262
|
+
) -> WorkbookTemplate:
|
|
263
|
+
"""Clone a workbook template from the source to the target workspace.
|
|
264
|
+
|
|
265
|
+
Retrieves the source template, clones its layout and content. For safety, we replace
|
|
266
|
+
all unique identifiers with new UUIDs. We then creates a new template in the target workspace.
|
|
267
|
+
The cloned template maintains all metadata (title, description, labels, properties).
|
|
268
|
+
|
|
269
|
+
Args:
|
|
270
|
+
source_template: The source WorkbookTemplate to clone.
|
|
271
|
+
destination_client: The NominalClient to create the cloned template in.
|
|
272
|
+
new_template_title: Optional new name for the cloned template. If not provided, the original is used.
|
|
273
|
+
new_template_description: Optional new name for the cloned template. If not provided, the original is used.
|
|
274
|
+
new_template_labels: Optional new labels for the cloned template. If not provided, the original is used.
|
|
275
|
+
new_template_properties: Optional new properties for the cloned template. If not provided, the original is used.
|
|
276
|
+
include_content_and_layout: If True, copy layout and content from template. Otherwise, use blank content.
|
|
277
|
+
|
|
278
|
+
Returns:
|
|
279
|
+
The newly created WorkbookTemplate in the target workspace.
|
|
280
|
+
"""
|
|
281
|
+
log_extras = {"destination_client_workspace": destination_client.get_workspace().rid}
|
|
282
|
+
logger.debug(
|
|
283
|
+
"Cloning workbook template: %s (rid: %s)", source_template.title, source_template.rid, extra=log_extras
|
|
284
|
+
)
|
|
285
|
+
raw_source_template = source_template._clients.template.get(
|
|
286
|
+
source_template._clients.auth_header, source_template.rid
|
|
287
|
+
)
|
|
288
|
+
|
|
289
|
+
if include_content_and_layout:
|
|
290
|
+
template_layout = raw_source_template.layout
|
|
291
|
+
template_content = raw_source_template.content
|
|
292
|
+
(new_template_layout, new_workbook_content) = _clone_conjure_objects_with_new_uuids(
|
|
293
|
+
(template_layout, template_content)
|
|
294
|
+
)
|
|
295
|
+
else:
|
|
296
|
+
new_template_layout = scout_layout_api.WorkbookLayout(
|
|
297
|
+
v1=scout_layout_api.WorkbookLayoutV1(
|
|
298
|
+
root_panel=scout_layout_api.Panel(
|
|
299
|
+
tabbed=scout_layout_api.TabbedPanel(
|
|
300
|
+
v1=scout_layout_api.TabbedPanelV1(
|
|
301
|
+
id=str(uuid.uuid4()),
|
|
302
|
+
tabs=[],
|
|
303
|
+
)
|
|
304
|
+
)
|
|
305
|
+
)
|
|
306
|
+
)
|
|
307
|
+
)
|
|
308
|
+
new_workbook_content = scout_workbookcommon_api.WorkbookContent(channel_variables={}, charts={})
|
|
309
|
+
new_workbook_template = create_workbook_template_with_content_and_layout(
|
|
310
|
+
client=destination_client,
|
|
311
|
+
title=new_template_title or raw_source_template.metadata.title,
|
|
312
|
+
description=new_template_description or raw_source_template.metadata.description,
|
|
313
|
+
labels=new_template_labels or raw_source_template.metadata.labels,
|
|
314
|
+
properties=new_template_properties or raw_source_template.metadata.properties,
|
|
315
|
+
layout=new_template_layout,
|
|
316
|
+
content=new_workbook_content,
|
|
317
|
+
commit_message="Cloned from template",
|
|
318
|
+
workspace_rid=destination_client.get_workspace().rid,
|
|
319
|
+
)
|
|
320
|
+
logger.debug(
|
|
321
|
+
"New workbook template created %s from %s (rid: %s)",
|
|
322
|
+
new_workbook_template.title,
|
|
323
|
+
source_template.title,
|
|
324
|
+
source_template.rid,
|
|
325
|
+
extra=log_extras,
|
|
326
|
+
)
|
|
327
|
+
return new_workbook_template
|
|
328
|
+
|
|
329
|
+
|
|
330
|
+
def copy_file_to_dataset(
|
|
331
|
+
source_file: DatasetFile,
|
|
332
|
+
destination_dataset: Dataset,
|
|
333
|
+
) -> DatasetFile:
|
|
334
|
+
"""Copy a dataset file from the source to the destination dataset.
|
|
335
|
+
|
|
336
|
+
Args:
|
|
337
|
+
source_file: The source DatasetFile to copy.
|
|
338
|
+
destination_dataset: The Dataset to create the copied file in.
|
|
339
|
+
|
|
340
|
+
Returns:
|
|
341
|
+
The dataset file in the new dataset.
|
|
342
|
+
"""
|
|
343
|
+
log_extras = {"destination_client_workspace": destination_dataset._clients.workspace_rid}
|
|
344
|
+
logger.debug("Copying dataset file: %s", source_file.name, extra=log_extras)
|
|
345
|
+
source_api_file = source_file._get_latest_api()
|
|
346
|
+
if (
|
|
347
|
+
source_api_file.handle.s3 is not None
|
|
348
|
+
and source_file.timestamp_channel is not None
|
|
349
|
+
and source_file.timestamp_type is not None
|
|
350
|
+
):
|
|
351
|
+
old_file_uri = source_file._clients.catalog.get_dataset_file_uri(
|
|
352
|
+
source_file._clients.auth_header, source_file.dataset_rid, source_file.id
|
|
353
|
+
).uri
|
|
354
|
+
|
|
355
|
+
response = requests.get(old_file_uri, stream=True)
|
|
356
|
+
response.raise_for_status()
|
|
357
|
+
|
|
358
|
+
file_name = source_api_file.handle.s3.key.split("/")[-1]
|
|
359
|
+
file_type = FileType.from_path(file_name)
|
|
360
|
+
file_stem = Path(file_name).stem
|
|
361
|
+
|
|
362
|
+
new_file = destination_dataset.add_from_io(
|
|
363
|
+
dataset=cast(BinaryIO, response.raw),
|
|
364
|
+
timestamp_column=source_file.timestamp_channel,
|
|
365
|
+
timestamp_type=source_file.timestamp_type,
|
|
366
|
+
file_type=file_type,
|
|
367
|
+
file_name=file_stem,
|
|
368
|
+
tag_columns=source_file.tag_columns,
|
|
369
|
+
tags=source_file.file_tags,
|
|
370
|
+
)
|
|
371
|
+
logger.debug(
|
|
372
|
+
"New file created %s in dataset: %s (rid: %s)",
|
|
373
|
+
new_file.name,
|
|
374
|
+
destination_dataset.name,
|
|
375
|
+
destination_dataset.rid,
|
|
376
|
+
)
|
|
377
|
+
return new_file
|
|
378
|
+
else: # Because these fields are optional, need to check for None. We shouldn't ever run into this.
|
|
379
|
+
raise ValueError("Unsupported file handle type or missing timestamp information.")
|
|
380
|
+
|
|
381
|
+
|
|
382
|
+
def clone_dataset(source_dataset: Dataset, destination_client: NominalClient) -> Dataset:
|
|
383
|
+
"""Clones a dataset, maintaining all properties and files.
|
|
384
|
+
|
|
385
|
+
Args:
|
|
386
|
+
source_dataset (Dataset): The dataset to copy from.
|
|
387
|
+
destination_client (NominalClient): The destination client.
|
|
388
|
+
|
|
389
|
+
Returns:
|
|
390
|
+
The cloned dataset.
|
|
391
|
+
"""
|
|
392
|
+
return copy_dataset_from(source_dataset=source_dataset, destination_client=destination_client, include_files=True)
|
|
393
|
+
|
|
394
|
+
|
|
395
|
+
def copy_dataset_from(
|
|
396
|
+
source_dataset: Dataset,
|
|
397
|
+
destination_client: NominalClient,
|
|
398
|
+
*,
|
|
399
|
+
new_dataset_name: str | None = None,
|
|
400
|
+
new_dataset_description: str | None = None,
|
|
401
|
+
new_dataset_properties: dict[str, Any] | None = None,
|
|
402
|
+
new_dataset_labels: Sequence[str] | None = None,
|
|
403
|
+
include_files: bool = False,
|
|
404
|
+
) -> Dataset:
|
|
405
|
+
"""Copy a dataset from the source to the destination client.
|
|
406
|
+
|
|
407
|
+
Args:
|
|
408
|
+
source_dataset: The source Dataset to copy.
|
|
409
|
+
destination_client: The NominalClient to create the copied dataset in.
|
|
410
|
+
new_dataset_name: Optional new name for the copied dataset. If not provided, the original name is used.
|
|
411
|
+
new_dataset_description: Optional new description for the copied dataset.
|
|
412
|
+
If not provided, the original description is used.
|
|
413
|
+
new_dataset_properties: Optional new properties for the copied dataset. If not provided, the original
|
|
414
|
+
properties are used.
|
|
415
|
+
new_dataset_labels: Optional new labels for the copied dataset. If not provided, the original labels are used.
|
|
416
|
+
include_files: Whether to include files in the copied dataset.
|
|
417
|
+
|
|
418
|
+
Returns:
|
|
419
|
+
The newly created Dataset in the destination client.
|
|
420
|
+
"""
|
|
421
|
+
log_extras = {"destination_client_workspace": destination_client.get_workspace().rid}
|
|
422
|
+
logger.debug(
|
|
423
|
+
"Copying dataset %s (rid: %s)",
|
|
424
|
+
source_dataset.name,
|
|
425
|
+
source_dataset.rid,
|
|
426
|
+
extra=log_extras,
|
|
427
|
+
)
|
|
428
|
+
new_dataset = destination_client.create_dataset(
|
|
429
|
+
name=new_dataset_name if new_dataset_name is not None else source_dataset.name,
|
|
430
|
+
description=new_dataset_description if new_dataset_description is not None else source_dataset.description,
|
|
431
|
+
properties=new_dataset_properties if new_dataset_properties is not None else source_dataset.properties,
|
|
432
|
+
labels=new_dataset_labels if new_dataset_labels is not None else source_dataset.labels,
|
|
433
|
+
)
|
|
434
|
+
if include_files:
|
|
435
|
+
for source_file in source_dataset.list_files():
|
|
436
|
+
copy_file_to_dataset(source_file, new_dataset)
|
|
437
|
+
logger.debug("New dataset created: %s (rid: %s)", new_dataset.name, new_dataset.rid, extra=log_extras)
|
|
438
|
+
return new_dataset
|
|
439
|
+
|
|
440
|
+
|
|
441
|
+
def clone_asset(
|
|
442
|
+
source_asset: Asset,
|
|
443
|
+
destination_client: NominalClient,
|
|
444
|
+
) -> Asset:
|
|
445
|
+
"""Clone an asset from the source to the target client.
|
|
446
|
+
|
|
447
|
+
Args:
|
|
448
|
+
+        source_asset: The source Asset to clone.
+        destination_client: The NominalClient to create the cloned asset in.
+
+    Returns:
+        The newly created Asset in the target client.
+    """
+    return copy_asset_from(source_asset=source_asset, destination_client=destination_client, include_data=True)
+
+
+def copy_asset_from(
+    source_asset: Asset,
+    destination_client: NominalClient,
+    *,
+    new_asset_name: str | None = None,
+    new_asset_description: str | None = None,
+    new_asset_properties: dict[str, Any] | None = None,
+    new_asset_labels: Sequence[str] | None = None,
+    include_data: bool = False,
+) -> Asset:
+    """Copy an asset from the source to the destination client.
+
+    Args:
+        source_asset: The source Asset to copy.
+        destination_client: The NominalClient to create the copied asset in.
+        new_asset_name: Optional new name for the copied asset. If not provided, the original name is used.
+        new_asset_description: Optional new description for the copied asset. If not provided, original description used
+        new_asset_properties: Optional new properties for the copied asset. If not provided, original properties used.
+        new_asset_labels: Optional new labels for the copied asset. If not provided, the original labels are used.
+        include_data: Whether to include data in the copied asset.
+
+    Returns:
+        The new asset created.
+    """
+    log_extras = {"destination_client_workspace": destination_client.get_workspace().rid}
+    logger.debug("Copying asset %s (rid: %s)", source_asset.name, source_asset.rid, extra=log_extras)
+    new_asset = destination_client.create_asset(
+        name=new_asset_name if new_asset_name is not None else source_asset.name,
+        description=new_asset_description if new_asset_description is not None else source_asset.description,
+        properties=new_asset_properties if new_asset_properties is not None else source_asset.properties,
+        labels=new_asset_labels if new_asset_labels is not None else source_asset.labels,
+    )
+    if include_data:
+        source_datasets = source_asset.list_datasets()
+        new_datasets = []
+        for data_scope, source_dataset in source_datasets:
+            new_dataset = clone_dataset(
+                source_dataset=source_dataset,
+                destination_client=destination_client,
+            )
+            new_datasets.append(new_dataset)
+            new_asset.add_dataset(data_scope, new_dataset)
+    logger.debug("New asset created: %s (rid: %s)", new_asset, new_asset.rid, extra=log_extras)
+    return new_asset
+
+
+def copy_resources_to_destination_client(
+    destination_client: NominalClient,
+    source_assets: Sequence[Asset],
+    source_workbook_templates: Sequence[WorkbookTemplate],
+) -> tuple[Sequence[tuple[str, Dataset]], Sequence[Asset], Sequence[WorkbookTemplate], Sequence[Workbook]]:
+    """Based on a list of assets and workbook templates, copy resources to destination client, creating
+    new datasets, datafiles, and workbooks along the way.
+
+    Args:
+        destination_client (NominalClient): client of the tenant/workspace to copy resources to.
+        source_assets (Sequence[Asset]): a list of assets to copy (with data)
+        source_workbook_templates (Sequence[WorkbookTemplate]): a list of workbook templates to clone
+            and create workbooks from.
+
+    Returns:
+        All of the created resources.
+    """
+    log_extras = {
+        "destination_client_workspace": destination_client.get_workspace().rid,
+    }
+
+    if len(source_assets) != 1:
+        raise ValueError("Currently, only single asset can be used to create workbook from template")
+
+    new_assets = []
+    new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
+    for source_asset in source_assets:
+        new_asset = clone_asset(source_asset, destination_client)
+        new_assets.append(new_asset)
+        new_data_scopes_and_datasets.extend(new_asset.list_datasets())
+    new_templates = []
+    new_workbooks = []
+
+    for source_workbook_template in source_workbook_templates:
+        new_template = clone_workbook_template(source_workbook_template, destination_client)
+        new_templates.append(new_template)
+        new_workbook = new_template.create_workbook(
+            title=new_template.title, description=new_template.description, asset=new_assets[0]
+        )
+        logger.debug(
+            "Created new workbook %s (rid: %s) from template %s (rid: %s)",
+            new_workbook.title,
+            new_workbook.rid,
+            new_template.title,
+            new_template.rid,
+            extra=log_extras,
+        )
+        new_workbooks.append(new_workbook)
+
+    return (new_data_scopes_and_datasets, new_assets, new_templates, new_workbooks)
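A minimal usage sketch of the new migration helpers above, under stated assumptions: `source_asset`, `source_template`, and `destination_client` stand in for an Asset, a WorkbookTemplate, and a NominalClient obtained elsewhere, and the helpers are imported from nominal.experimental.migration.migration_utils as defined in this diff; the wrapper function and the copied-asset name are hypothetical.

from nominal.experimental.migration.migration_utils import (
    copy_asset_from,
    copy_resources_to_destination_client,
)


def migrate_single_asset(source_asset, source_template, destination_client):
    """Hypothetical helper: copy one asset and one workbook template to another workspace.

    `source_asset`, `source_template`, and `destination_client` are assumed to be an
    Asset, a WorkbookTemplate, and a NominalClient obtained elsewhere.
    """
    # Copy the asset by itself; include_data=True also clones its datasets.
    copied_asset = copy_asset_from(
        source_asset,
        destination_client,
        new_asset_name=f"{source_asset.name} (copy)",  # optional; defaults to the source name
        include_data=True,
    )

    # The bundled helper copies assets and workbook templates together and creates a
    # workbook from each cloned template; it currently requires exactly one source asset.
    # Both calls are shown here only for illustration -- in practice pick one path.
    scoped_datasets, assets, templates, workbooks = copy_resources_to_destination_client(
        destination_client,
        source_assets=[source_asset],
        source_workbook_templates=[source_template],
    )
    return copied_asset, scoped_datasets, assets, templates, workbooks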
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nominal
-Version: 1.
+Version: 1.100.0
 Summary: Automate Nominal workflows in Python
 Project-URL: Homepage, https://nominal.io
 Project-URL: Documentation, https://docs.nominal.io
@@ -20,7 +20,7 @@ Requires-Dist: cachetools>=6.1.0
 Requires-Dist: click<9,>=8
 Requires-Dist: conjure-python-client<4,>=3.1.0
 Requires-Dist: ffmpeg-python>=0.2.0
-Requires-Dist: nominal-api==0.
+Requires-Dist: nominal-api==0.1032.0
 Requires-Dist: nominal-streaming==0.5.8
 Requires-Dist: openpyxl>=0.0.0
 Requires-Dist: pandas>=0.0.0
@@ -30,6 +30,7 @@ Requires-Dist: pyyaml>=0.0.0
 Requires-Dist: requests>=0.0.0
 Requires-Dist: rich>=14.1.0
 Requires-Dist: tabulate<0.10,>=0.9.0
+Requires-Dist: truststore>=0.10.4
 Requires-Dist: types-cachetools>=6.0.0.20250525
 Requires-Dist: typing-extensions<5,>=4
 Provides-Extra: protos
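The only new runtime dependency in this release is truststore. As a general illustration of that library's documented entry points for trusting the operating-system certificate store (not necessarily how this package wires it in), a minimal sketch:

import ssl

import requests
import truststore

# Patch the stdlib ssl module so that SSL contexts created afterwards (including the
# ones requests builds internally) verify against the OS trust store instead of certifi.
truststore.inject_into_ssl()
resp = requests.get("https://nominal.io")
resp.raise_for_status()

# Alternatively, build an explicit context for libraries that accept an ssl.SSLContext.
ctx = truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)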
@@ -1,4 +1,4 @@
-CHANGELOG.md,sha256=
+CHANGELOG.md,sha256=jvj73GmZ1DOpXVyEcRVZ7Wm2ejFmH2UhBPO66Yt8wu0,83100
 LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
 README.md,sha256=KKe0dxh_pHXCtB7I9G4qWGQYvot_BZU8yW6MJyuyUHM,311
 nominal/__init__.py,sha256=rbraORnXUrNn1hywLXM0XwSQCd9UmQt20PDYlsBalfE,2167
@@ -30,23 +30,23 @@ nominal/config/_config.py,sha256=yKq_H1iYJDoxRfLz2iXLbbVdoL0MTEY0FS4eVL12w0g,200
 nominal/core/__init__.py,sha256=5eC2J0lzpV7JcuKDUimJCfgXuVL7HNgHrLhqxcy5NCc,2333
 nominal/core/_clientsbunch.py,sha256=YwciugX7rQ9AOPHyvKuavG7b9SlX1PURRquP37nvLqE,8458
 nominal/core/_constants.py,sha256=SrxgaSqAEB1MvTSrorgGam3eO29iCmRr6VIdajxX3gI,56
-nominal/core/asset.py,sha256=
+nominal/core/asset.py,sha256=vWi_5jNm1sBo4jCa4wTrL65IQ7b_lefZTLRsakoW7ro,18355
 nominal/core/attachment.py,sha256=iJaDyF6JXsKxxBLA03I0WMmQF8U0bA-wRwvXMEhfWLU,4284
 nominal/core/bounds.py,sha256=742BWmGL3FBryRAjoiJRg2N6aVinjYkQLxN7kfnJ40Q,581
 nominal/core/channel.py,sha256=dbe8wpfMiWqHu98x66w6GOmC9Ro33Wv9AhBVx2DvtVk,18970
 nominal/core/checklist.py,sha256=rO1RPDYV3o2miPKF7DcCiYpj6bUN-sdtZNhJkXzkfYE,7110
-nominal/core/client.py,sha256=
+nominal/core/client.py,sha256=zTaayeJf8IFA7BlNoVCaVpDA6cXIBaZGP934Tg6OhDI,67568
 nominal/core/connection.py,sha256=ySbPN_a2takVa8wIU9mK4fB6vYLyZnN-qSmXVkLUxAY,5157
-nominal/core/containerized_extractors.py,sha256=
+nominal/core/containerized_extractors.py,sha256=fUz3-NHoNWYKqOCD15gLwGXDKVfdsW-x_kpXnkOI3BE,10224
 nominal/core/data_review.py,sha256=bEnRsd8LI4x9YOBPcF2H3h5-e12A7Gh8gQfsNUAZmPQ,7922
-nominal/core/dataset.py,sha256=
+nominal/core/dataset.py,sha256=SUsn6qsbuceVLRYF47IquY_sW6OBRp4aExL1F3Bsaec,34802
 nominal/core/dataset_file.py,sha256=oENANJ17A4K63cZ8Fr7lUm_kVPyA4fL2rUsZ3oXXk2U,16396
 nominal/core/datasource.py,sha256=D9jHirAzUZ0pc3nW1XIURpw1UqQoA2E-nUUylZR1jbE,16707
 nominal/core/event.py,sha256=D8qIX_dTjfSHN7jFW8vV-9htbQTaqk9VvRfK7t-sbbw,5891
 nominal/core/exceptions.py,sha256=GUpwXRgdYamLl6684FE8ttCRHkBx6WEhOZ3NPE-ybD4,2671
-nominal/core/filetype.py,sha256=
+nominal/core/filetype.py,sha256=jAPe6F7pDT8ixsD2-Y8eJdHOxgimdEQte4RQybWwsos,5465
 nominal/core/log.py,sha256=z3hI3CIEyMwpUSWjwBsJ6a3JNGzBbsmrVusSU6uI7CY,3885
-nominal/core/run.py,sha256=
+nominal/core/run.py,sha256=Rqy2o6sLE5RsAvvNnle7jRPJ-8UNfHmD-pdsRTOjA8Y,14792
 nominal/core/secret.py,sha256=Ckq48m60i7rktxL9GY-nxHU5v8gHv9F1-JN7_MSf4bM,2863
 nominal/core/unit.py,sha256=Wa-Bvu0hD-nzxVaQJSnn5YqAfnhUd2kWw2SswXnbMHY,3161
 nominal/core/user.py,sha256=FV333TN4pQzcLh5b2CfxvBnnXyB1TrOP8Ppx1-XdaiE,481
@@ -63,9 +63,9 @@ nominal/core/_stream/write_stream_base.py,sha256=AxK3fAq3IBjNXZkxYFVXu3dGNWLCBhg
 nominal/core/_utils/README.md,sha256=kWPQDc6kn-PjXFUsIH9u2nOA3RdGSXCOlxqeJSmUsPA,160
 nominal/core/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/core/_utils/api_tools.py,sha256=Z--Et7NjpCH4if72WwGm45EyqyeqK2ambcBrtOSDMrY,2949
-nominal/core/_utils/multipart.py,sha256=
-nominal/core/_utils/multipart_downloader.py,sha256=
-nominal/core/_utils/networking.py,sha256=
+nominal/core/_utils/multipart.py,sha256=0dA2XcTHuOQIyS0139O8WZiCjwePaD1sYDUmTgmWG9w,10243
+nominal/core/_utils/multipart_downloader.py,sha256=16OJEPqxCwOnfjptYdrlwQVuSUQYoe9_iiW60ZSjWos,13859
+nominal/core/_utils/networking.py,sha256=n9ZqYtnpwPCjz9C-4eixsTkrhFh-DW6lknBJlHckHhg,8200
 nominal/core/_utils/pagination_tools.py,sha256=cEBY1WiA1d3cWJEM0myYF_pX8JdQ_e-5asngVXrUc_Y,12152
 nominal/core/_utils/query_tools.py,sha256=rabmhqUYw0POybZtGDoMyAwwXh4VMuYM6mMf-iAfWdc,15860
 nominal/core/_utils/queueing.py,sha256=3qljc7dFI1UahlKjCaRVybM4poMCV5SayjyRPyXcPxg,3654
@@ -84,6 +84,8 @@ nominal/experimental/logging/__init__.py,sha256=9HrTkk_eyHv_w7EwqwOE1lf1Sa1Cm0cA
 nominal/experimental/logging/click_log_handler.py,sha256=ANLf4IGgmh95V0kJlr756wQrjmQKp6DEpc9Cj7J5qHM,2331
 nominal/experimental/logging/nominal_log_handler.py,sha256=hyTxyjsvFnE7vtyrDJpunAqADHmXekNWALwxXPIJGCk,5120
 nominal/experimental/logging/rich_log_handler.py,sha256=8yz_VtxNgJg2oiesnXz2iXoBvQrUP5pAsYkxknOXgXA,1231
+nominal/experimental/migration/__init__.py,sha256=E2IgWJLwJ5bN6jbl8k5nHECKFx5aT11jKAzVYcyXn3o,460
+nominal/experimental/migration/migration_utils.py,sha256=j4In_sU_cWW1kScneMP2G8B7LHDcnY2YDE0fwIv8BiY,22831
 nominal/experimental/rust_streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/experimental/rust_streaming/rust_write_stream.py,sha256=UoJEujzRAPlsAq2c24fgrub3c0DO0HM10SGsIgZjPKk,1499
 nominal/experimental/stream_v2/__init__.py,sha256=W39vK46pssx5sXvmsImMuJiEPs7iGtwrbYBI0bWnXCY,2313
@@ -102,8 +104,8 @@ nominal/thirdparty/polars/polars_export_handler.py,sha256=hGCSwXX9dC4MG01CmmjlTb
 nominal/thirdparty/tdms/__init__.py,sha256=6n2ImFr2Wiil6JM1P5Q7Mpr0VzLcnDkmup_ftNpPq-s,142
 nominal/thirdparty/tdms/_tdms.py,sha256=eiHFTUviyDPDClckNldjs_jTTSH_sdmboKDq0oIGChQ,8711
 nominal/ts/__init__.py,sha256=hmd0ENvDhxRnzDKGLxIub6QG8LpcxCgcyAct029CaEs,21442
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
+nominal-1.100.0.dist-info/METADATA,sha256=v6eSXRLbr-cRBxvxBm3hsi5cfAQavQ2Zhng00SrS3xc,1981
+nominal-1.100.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+nominal-1.100.0.dist-info/entry_points.txt,sha256=-mCLhxgg9R_lm5efT7vW9wuBH12izvY322R0a3TYxbE,66
+nominal-1.100.0.dist-info/licenses/LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
+nominal-1.100.0.dist-info/RECORD,,
File without changes
File without changes
File without changes