nominal 1.104.3-py3-none-any.whl → 1.106.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- CHANGELOG.md +19 -0
- nominal/core/dataset.py +4 -0
- nominal/experimental/compute/_buckets.py +35 -4
- nominal/experimental/dataset_utils/__init__.py +5 -0
- nominal/experimental/dataset_utils/_dataset_utils.py +52 -0
- nominal/experimental/migration/migration_resources.py +18 -0
- nominal/experimental/migration/migration_utils.py +56 -33
- {nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/METADATA +2 -2
- {nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/RECORD +12 -9
- {nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/WHEEL +0 -0
- {nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/entry_points.txt +0 -0
- {nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/licenses/LICENSE +0 -0
CHANGELOG.md
CHANGED
@@ -1,5 +1,24 @@
 # Changelog
 
+## [1.106.0](https://github.com/nominal-io/nominal-client/compare/v1.105.0...v1.106.0) (2026-01-21)
+
+
+### Features
+
+* create dataset with uuid endpoint for mgiration ([#580](https://github.com/nominal-io/nominal-client/issues/580)) ([f3a3b38](https://github.com/nominal-io/nominal-client/commit/f3a3b383d62e8466af617179479918ef0d1ee534))
+
+
+### Bug Fixes
+
+* handle case where compute returns a non-bucketed response for a bucketed query ([#579](https://github.com/nominal-io/nominal-client/issues/579)) ([58b84f5](https://github.com/nominal-io/nominal-client/commit/58b84f55d282a84c8eb6703816b16920e3b8ecf3))
+
+## [1.105.0](https://github.com/nominal-io/nominal-client/compare/v1.104.3...v1.105.0) (2026-01-21)
+
+
+### Features
+
+* refactor top-level migration config to allow for mapping of templates to assets ([#577](https://github.com/nominal-io/nominal-client/issues/577)) ([9f1f39d](https://github.com/nominal-io/nominal-client/commit/9f1f39d721ea54135441e00b0a31c8fdd78b381c))
+
 ## [1.104.3](https://github.com/nominal-io/nominal-client/compare/v1.104.2...v1.104.3) (2026-01-13)
 
nominal/core/dataset.py
CHANGED
@@ -1093,6 +1093,7 @@ def _construct_new_ingest_options(
                 tag_columns=tag_columns,
                 is_archive=file_type.is_parquet_archive(),
                 additional_file_tags={**tags} if tags else None,
+                exclude_columns=[],
             )
         )
     else:
@@ -1107,6 +1108,7 @@ def _construct_new_ingest_options(
                 channel_prefix=channel_prefix,
                 tag_columns=tag_columns,
                 additional_file_tags={**tags} if tags else None,
+                exclude_columns=[],
             )
         )

@@ -1139,6 +1141,7 @@ def _construct_existing_ingest_options(
                 tag_columns=tag_columns,
                 is_archive=file_type.is_parquet_archive(),
                 additional_file_tags={**tags} if tags else None,
+                exclude_columns=[],
             )
         )
     else:
@@ -1152,5 +1155,6 @@
                 timestamp_metadata=timestamp_metadata,
                 tag_columns=tag_columns,
                 additional_file_tags={**tags} if tags else None,
+                exclude_columns=[],
             )
         )
nominal/experimental/compute/_buckets.py
CHANGED
@@ -317,10 +317,41 @@ def _compute_buckets(
 def _numeric_buckets_from_compute_response(
     response: scout_compute_api.ComputeNodeResponse,
 ) -> Iterable[tuple[api.Timestamp, scout_compute_api.NumericBucket]]:
-    if response.
-
-
-
+    if response.numeric_point is not None:
+        # single point would be returned-- create a synthetic bucket
+        val = response.numeric_point.value
+        yield (
+            response.numeric_point.timestamp,
+            scout_compute_api.NumericBucket(
+                count=1,
+                first_point=response.numeric_point,
+                max=val,
+                mean=val,
+                min=val,
+                variance=0,
+                last_point=response.numeric_point,
+            ),
+        )
+    elif response.numeric is not None:
+        # Not enough points to reach the number of requested bucket count, so
+        # gets returned as all of the raw data.
+        for timestamp, value in zip(response.numeric.timestamps, response.numeric.values):
+            point = scout_compute_api.NumericPoint(timestamp, value)
+            yield (
+                timestamp,
+                scout_compute_api.NumericBucket(
+                    count=1,
+                    first_point=point,
+                    max=value,
+                    min=value,
+                    mean=value,
+                    variance=0,
+                    last_point=point,
+                ),
+            )
+    elif response.bucketed_numeric is not None:
+        # Actually bucketed data
+        yield from zip(response.bucketed_numeric.timestamps, response.bucketed_numeric.buckets)


 def _timestamp_from_conjure(timestamp: api.Timestamp) -> params.NanosecondsUTC:
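The fix above (#579) makes `_numeric_buckets_from_compute_response` yield `(timestamp, NumericBucket)` pairs for all three response shapes: a single point and a raw numeric series are wrapped in synthetic single-sample buckets (count=1, variance=0), while an already-bucketed response passes through unchanged. A minimal consumption sketch, assuming `response` is a `scout_compute_api.ComputeNodeResponse` obtained elsewhere (placeholder here):

```python
from nominal.experimental.compute._buckets import _numeric_buckets_from_compute_response

# `response` is a placeholder ComputeNodeResponse; callers no longer need to
# special-case point or raw-series results when a bucketed query was issued.
for timestamp, bucket in _numeric_buckets_from_compute_response(response):
    print(timestamp, bucket.count, bucket.min, bucket.mean, bucket.max)
```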
nominal/experimental/dataset_utils/_dataset_utils.py
ADDED
@@ -0,0 +1,52 @@
+from collections.abc import Mapping, Sequence
+
+from nominal_api import scout_catalog
+
+from nominal.core import Dataset, NominalClient
+
+
+def create_dataset_with_uuid(
+    client: NominalClient,
+    dataset_uuid: str,
+    name: str,
+    *,
+    description: str | None = None,
+    labels: Sequence[str] = (),
+    properties: Mapping[str, str] | None = None,
+) -> Dataset:
+    """Create a dataset with a specific UUID.
+
+    This is useful for migrations where the dataset UUID must be controlled by the caller.
+    Throws a conflict error if a dataset with the specified UUID already exists.
+
+    This endpoint is not intended for general use. Use `NominalClient.create_dataset` instead
+    to create a new dataset with an auto-generated UUID.
+
+    Args:
+        client: The NominalClient to use for creating the dataset.
+        dataset_uuid: The UUID to assign to the new dataset.
+        name: Name of the dataset to create.
+        description: Human readable description of the dataset.
+        labels: Text labels to apply to the created dataset.
+        properties: Key-value properties to apply to the created dataset.
+
+    Returns:
+        Reference to the created dataset in Nominal.
+    """
+    create_dataset_request = scout_catalog.CreateDataset(
+        name=name,
+        description=description,
+        labels=list(labels),
+        properties={} if properties is None else dict(properties),
+        is_v2_dataset=True,
+        metadata={},
+        origin_metadata=scout_catalog.DatasetOriginMetadata(),
+        workspace=client._clients.workspace_rid,
+        marking_rids=[],
+    )
+    request = scout_catalog.CreateDatasetWithUuidRequest(
+        create_dataset=create_dataset_request,
+        uuid=dataset_uuid,
+    )
+    response = client._clients.catalog.create_dataset_with_uuid(client._clients.auth_header, request)
+    return Dataset._from_conjure(client._clients, response)
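The new helper is re-exported from `nominal.experimental.dataset_utils` (as the import in `migration_utils.py` below shows). A usage sketch, assuming `destination_client` is an already-configured `NominalClient` and `source_uuid` holds the UUID the new dataset should carry (both placeholders):

```python
from nominal.experimental.dataset_utils import create_dataset_with_uuid

# Placeholders: `destination_client` targets the destination workspace;
# `source_uuid` is the UUID to pin on the new dataset.
migrated = create_dataset_with_uuid(
    client=destination_client,
    dataset_uuid=source_uuid,
    name="flight-telemetry",                       # hypothetical dataset name
    description="Copied during tenant migration",  # hypothetical description
    labels=["migrated"],
    properties={"source": "legacy-tenant"},
)
print(migrated.rid)
```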
nominal/experimental/migration/migration_resources.py
ADDED
@@ -0,0 +1,18 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Sequence
+
+from nominal.core.asset import Asset
+from nominal.core.workbook_template import WorkbookTemplate
+
+
+@dataclass(frozen=True)
+class AssetResources:
+    asset: Asset
+    source_workbook_templates: Sequence[WorkbookTemplate]
+
+
+@dataclass(frozen=True)
+class MigrationResources:
+    source_assets: Sequence[AssetResources]
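These new config objects group each source asset with the workbook templates to clone alongside it (#577). A construction sketch, assuming `source_asset` is an `Asset` and `template` a `WorkbookTemplate` fetched from the source client (placeholders):

```python
from nominal.experimental.migration.migration_resources import (
    AssetResources,
    MigrationResources,
)

# Placeholders: `source_asset` and `template` come from the source tenant.
resources = MigrationResources(
    source_assets=[
        AssetResources(
            asset=source_asset,
            source_workbook_templates=[template],
        )
    ]
)
```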
nominal/experimental/migration/migration_utils.py
CHANGED
@@ -26,6 +26,8 @@ from nominal.core._event_types import EventType, SearchEventOriginType
 from nominal.core._utils.api_tools import Link, LinkDict
 from nominal.core.attachment import Attachment
 from nominal.core.run import Run
+from nominal.experimental.dataset_utils import create_dataset_with_uuid
+from nominal.experimental.migration.migration_resources import MigrationResources
 from nominal.ts import (
     IntegralNanosecondsDuration,
     IntegralNanosecondsUTC,
@@ -421,6 +423,7 @@ def copy_dataset_from(
     new_dataset_properties: dict[str, Any] | None = None,
     new_dataset_labels: Sequence[str] | None = None,
     include_files: bool = False,
+    preserve_uuid: bool = False,
 ) -> Dataset:
     """Copy a dataset from the source to the destination client.

@@ -434,6 +437,9 @@
             properties are used.
         new_dataset_labels: Optional new labels for the copied dataset. If not provided, the original labels are used.
         include_files: Whether to include files in the copied dataset.
+        preserve_uuid: If True, create the dataset with the same UUID as the source dataset.
+            This is useful for migrations where references to datasets must be preserved.
+            Throws a conflict error if a dataset with the UUID already exists.

     Returns:
         The newly created Dataset in the destination client.
@@ -447,12 +453,34 @@
         source_dataset.rid,
         extra=log_extras,
     )
-
-
-
-
-
-
+
+    dataset_name = new_dataset_name if new_dataset_name is not None else source_dataset.name
+    dataset_description = new_dataset_description if new_dataset_description is not None else source_dataset.description
+    dataset_properties = new_dataset_properties if new_dataset_properties is not None else source_dataset.properties
+    dataset_labels = new_dataset_labels if new_dataset_labels is not None else source_dataset.labels
+
+    if preserve_uuid:
+        # Extract the UUID from the source dataset's rid
+        match = UUID_PATTERN.search(source_dataset.rid)
+        if not match:
+            raise ValueError(f"Could not extract UUID from dataset rid: {source_dataset.rid}")
+        source_uuid = match.group(2)
+        new_dataset = create_dataset_with_uuid(
+            client=destination_client,
+            dataset_uuid=source_uuid,
+            name=dataset_name,
+            description=dataset_description,
+            labels=dataset_labels,
+            properties=dataset_properties,
+        )
+    else:
+        new_dataset = destination_client.create_dataset(
+            name=dataset_name,
+            description=dataset_description,
+            properties=dataset_properties,
+            labels=dataset_labels,
+        )
+
     if include_files:
         for source_file in source_dataset.list_files():
             copy_file_to_dataset(source_file, new_dataset)
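With `preserve_uuid=True`, `copy_dataset_from` now extracts the UUID from the source dataset's rid and routes through `create_dataset_with_uuid`; otherwise it falls back to `create_dataset` as before. A hedged call sketch — the leading parameters of `copy_dataset_from` are not shown in this diff, so the keyword names `destination_client` and `source_dataset` are inferred from the function body:

```python
from nominal.experimental.migration import migration_utils

# Placeholders: `dest_client` is the destination NominalClient; `src_dataset`
# is the Dataset being migrated from the source tenant.
copied = migration_utils.copy_dataset_from(
    destination_client=dest_client,
    source_dataset=src_dataset,
    include_files=True,    # also copy data files into the new dataset
    preserve_uuid=True,    # keep the source dataset's UUID in the destination
)
```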
@@ -681,17 +709,14 @@ def copy_asset_from(

 def copy_resources_to_destination_client(
     destination_client: NominalClient,
-
-    source_workbook_templates: Sequence[WorkbookTemplate],
+    migration_resources: MigrationResources,
 ) -> tuple[Sequence[tuple[str, Dataset]], Sequence[Asset], Sequence[WorkbookTemplate], Sequence[Workbook]]:
     """Based on a list of assets and workbook templates, copy resources to destination client, creating
     new datasets, datafiles, and workbooks along the way.

     Args:
         destination_client (NominalClient): client of the tenant/workspace to copy resources to.
-
-        source_workbook_templates (Sequence[WorkbookTemplate]): a list of workbook templates to clone
-            and create workbooks from.
+        migration_resources (MigrationResources): resources to copy.

     Returns:
         All of the created resources.
@@ -700,32 +725,30 @@ def copy_resources_to_destination_client(
         "destination_client_workspace": destination_client.get_workspace(destination_client._clients.workspace_rid).rid,
     }

-    if len(source_assets) != 1:
-        raise ValueError("Currently, only single asset can be used to create workbook from template")
-
     new_assets = []
+    new_templates = []
+    new_workbooks = []
+
     new_data_scopes_and_datasets: list[tuple[str, Dataset]] = []
-    for source_asset in source_assets:
-        new_asset = clone_asset(source_asset, destination_client)
+    for source_asset in migration_resources.source_assets:
+        new_asset = clone_asset(source_asset.asset, destination_client)
         new_assets.append(new_asset)
         new_data_scopes_and_datasets.extend(new_asset.list_datasets())
-    new_templates = []
-    new_workbooks = []

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        for source_workbook_template in source_asset.source_workbook_templates:
+            new_template = clone_workbook_template(source_workbook_template, destination_client)
+            new_templates.append(new_template)
+            new_workbook = new_template.create_workbook(
+                title=new_template.title, description=new_template.description, asset=new_assets[0]
+            )
+            logger.debug(
+                "Created new workbook %s (rid: %s) from template %s (rid: %s)",
+                new_workbook.title,
+                new_workbook.rid,
+                new_template.title,
+                new_template.rid,
+                extra=log_extras,
+            )
+            new_workbooks.append(new_workbook)

     return (new_data_scopes_and_datasets, new_assets, new_templates, new_workbooks)
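`copy_resources_to_destination_client` now takes a single `MigrationResources` argument and clones each source asset along with the workbook templates grouped under it, instead of a flat template list limited to exactly one asset. A sketch tying it to the placeholders above:

```python
from nominal.experimental.migration.migration_utils import (
    copy_resources_to_destination_client,
)

# `dest_client` and `resources` are the placeholders from the earlier sketches.
datasets, assets, templates, workbooks = copy_resources_to_destination_client(
    destination_client=dest_client,
    migration_resources=resources,
)
for data_scope, dataset in datasets:
    print(data_scope, dataset.rid)
```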
{nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: nominal
-Version: 1.104.3
+Version: 1.106.0
 Summary: Automate Nominal workflows in Python
 Project-URL: Homepage, https://nominal.io
 Project-URL: Documentation, https://docs.nominal.io
@@ -20,7 +20,7 @@ Requires-Dist: cachetools>=6.1.0
 Requires-Dist: click<9,>=8
 Requires-Dist: conjure-python-client<4,>=3.1.0
 Requires-Dist: ffmpeg-python>=0.2.0
-Requires-Dist: nominal-api==0.
+Requires-Dist: nominal-api==0.1075.0
 Requires-Dist: nominal-streaming==0.5.8; platform_python_implementation == 'CPython' and python_version >= '3.10' and ((sys_platform == 'win32' and platform_machine == 'AMD64') or (sys_platform == 'darwin' and platform_machine == 'arm64') or (sys_platform == 'linux' and (platform_machine == 'x86_64' or platform_machine == 'armv7l')))
 Requires-Dist: openpyxl>=0.0.0
 Requires-Dist: pandas>=0.0.0
{nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-CHANGELOG.md,sha256=
+CHANGELOG.md,sha256=roEVqpYae9DRMQRYCiJQm7ei603DhmGY7_Il7PmaoDg,88865
 LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
 README.md,sha256=KKe0dxh_pHXCtB7I9G4qWGQYvot_BZU8yW6MJyuyUHM,311
 nominal/__init__.py,sha256=rbraORnXUrNn1hywLXM0XwSQCd9UmQt20PDYlsBalfE,2167
@@ -41,7 +41,7 @@ nominal/core/client.py,sha256=Awt9WPkE-YXBfOwJMTL7Su8AZFJY3UMH7IKp5hI26YQ,68328
 nominal/core/connection.py,sha256=LYllr3a1H2xp8-i4MaX1M7yK8X-HnwuIkciyK9XgLtQ,5175
 nominal/core/containerized_extractors.py,sha256=fUz3-NHoNWYKqOCD15gLwGXDKVfdsW-x_kpXnkOI3BE,10224
 nominal/core/data_review.py,sha256=Z_W1Okp_FSQDiVCk6aKb9gV0EXbE2jtiQaPqc6TaL0g,11038
-nominal/core/dataset.py,sha256=
+nominal/core/dataset.py,sha256=LqofzNAlOd3S_3Aaw6b7DoY50rj6GyMHbUClIA2TmpY,46792
 nominal/core/dataset_file.py,sha256=8rCW6MO89MFbQ2NH0WtFWmJfRWeTxhmyuoGojuQQ4Qg,16545
 nominal/core/datasource.py,sha256=V5UahbqsCNIdml978kOHiY6boIxKxbp76KscNBpN5xc,16934
 nominal/core/event.py,sha256=8trZXyuAqRlKedgcqSgDIimXAAJBmEfDLyHkOOBwUC0,7762
@@ -75,19 +75,22 @@ nominal/exceptions/__init__.py,sha256=W2r_GWJkZQQ6t3HooFjGRdhIgJq3fBvRV7Yn6gseoO
 nominal/experimental/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/experimental/compute/README.md,sha256=jdgqPSIVhMSTnY3qebtDdyd8SCi4o107EtKqui2L76A,4865
 nominal/experimental/compute/__init__.py,sha256=NmjX_ZuCy5L0FYodpuFXpPLUyf1R8V-OitlEKLQvrAs,178
-nominal/experimental/compute/_buckets.py,sha256=
+nominal/experimental/compute/_buckets.py,sha256=W2RckTYEZSxT3fjCnjS1iUYSqRV8EzQEix5T1B0qr30,15523
 nominal/experimental/compute/dsl/__init__.py,sha256=IDHrxnYLOy87zihq_HM1STtXPlxobzddvWbMOAQQh6A,502
 nominal/experimental/compute/dsl/_enum_expr_impls.py,sha256=RtKenY8ClmSYZEmsBh2ttRyVv3OSVMyANvFgbUylX1I,6899
 nominal/experimental/compute/dsl/_numeric_expr_impls.py,sha256=3EIMiysAUdwwrezr9aZ2vB4Z8xt-hHxeZ66h3lVLSV4,17083
 nominal/experimental/compute/dsl/_range_expr_impls.py,sha256=VCcU8L8gfSLSBphi_B8JBcN6zDVskvjmgsa8waz1QEU,1741
 nominal/experimental/compute/dsl/exprs.py,sha256=ppDbdblujuwN3vNkIqGwlTvbUmCnnjoOTCiHukpFGX8,10248
 nominal/experimental/compute/dsl/params.py,sha256=xwFwDMGV1Ifi4pjQGLb0v0YU8jJ_XKatN1nIGU1sm0I,3295
+nominal/experimental/dataset_utils/__init__.py,sha256=OpJWUldJ7p7sgFCE_jfosP9J6giUoktFBlVoUS9QwWE,134
+nominal/experimental/dataset_utils/_dataset_utils.py,sha256=vk3xdfBxkYY0KFhx4THX8B2w-pATMQFnOQIZAZqD9Wg,1896
 nominal/experimental/logging/__init__.py,sha256=9HrTkk_eyHv_w7EwqwOE1lf1Sa1Cm0cAf66beOsUfU0,322
 nominal/experimental/logging/click_log_handler.py,sha256=ANLf4IGgmh95V0kJlr756wQrjmQKp6DEpc9Cj7J5qHM,2331
 nominal/experimental/logging/nominal_log_handler.py,sha256=hyTxyjsvFnE7vtyrDJpunAqADHmXekNWALwxXPIJGCk,5120
 nominal/experimental/logging/rich_log_handler.py,sha256=8yz_VtxNgJg2oiesnXz2iXoBvQrUP5pAsYkxknOXgXA,1231
 nominal/experimental/migration/__init__.py,sha256=E2IgWJLwJ5bN6jbl8k5nHECKFx5aT11jKAzVYcyXn3o,460
-nominal/experimental/migration/
+nominal/experimental/migration/migration_resources.py,sha256=Tf_7kNBeSaY8z2fTF7DAxk-9q3a7F8xXFVvxI8tTc9c,415
+nominal/experimental/migration/migration_utils.py,sha256=E3K2_kMsWIG9p2oIwxM64aXJLRemhsCbqfJ4Jv4SJ4M,31214
 nominal/experimental/rust_streaming/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 nominal/experimental/rust_streaming/rust_write_stream.py,sha256=oQ6ixwm8ct8ZDc_qNB7AucDt8o5-_aBVlW2fFCQ_nmA,1541
 nominal/experimental/stream_v2/__init__.py,sha256=W39vK46pssx5sXvmsImMuJiEPs7iGtwrbYBI0bWnXCY,2313
@@ -106,8 +109,8 @@ nominal/thirdparty/polars/polars_export_handler.py,sha256=hGCSwXX9dC4MG01CmmjlTb
 nominal/thirdparty/tdms/__init__.py,sha256=6n2ImFr2Wiil6JM1P5Q7Mpr0VzLcnDkmup_ftNpPq-s,142
 nominal/thirdparty/tdms/_tdms.py,sha256=m4gxbpxB9MTLi2FuYvGlbUGSyDAZKFxbM3ia2x1wIz0,8746
 nominal/ts/__init__.py,sha256=hmd0ENvDhxRnzDKGLxIub6QG8LpcxCgcyAct029CaEs,21442
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
-nominal-1.
+nominal-1.106.0.dist-info/METADATA,sha256=cdf0KpzkFmrFb5XsWVEa6F4UNUr1WMp79uekN4hIkq8,2307
+nominal-1.106.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+nominal-1.106.0.dist-info/entry_points.txt,sha256=-mCLhxgg9R_lm5efT7vW9wuBH12izvY322R0a3TYxbE,66
+nominal-1.106.0.dist-info/licenses/LICENSE,sha256=zEGHG9mjDjaIS3I79O8mweQo-yiTbqx8jJvUPppVAwk,1067
+nominal-1.106.0.dist-info/RECORD,,
{nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/WHEEL
File without changes

{nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/entry_points.txt
File without changes

{nominal-1.104.3.dist-info → nominal-1.106.0.dist-info}/licenses/LICENSE
File without changes