cognite-neat 0.98.0__py3-none-any.whl → 0.99.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of cognite-neat has been flagged as potentially problematic; see the registry's advisory page for more details.
- cognite/neat/_client/__init__.py +4 -0
- cognite/neat/_client/_api/data_modeling_loaders.py +585 -0
- cognite/neat/_client/_api/schema.py +111 -0
- cognite/neat/_client/_api_client.py +17 -0
- cognite/neat/_client/data_classes/__init__.py +0 -0
- cognite/neat/{_utils/cdf/data_classes.py → _client/data_classes/data_modeling.py} +8 -135
- cognite/neat/_client/data_classes/schema.py +495 -0
- cognite/neat/_constants.py +27 -4
- cognite/neat/_graph/_shared.py +14 -15
- cognite/neat/_graph/extractors/_classic_cdf/_assets.py +14 -154
- cognite/neat/_graph/extractors/_classic_cdf/_base.py +154 -7
- cognite/neat/_graph/extractors/_classic_cdf/_classic.py +25 -14
- cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +17 -92
- cognite/neat/_graph/extractors/_classic_cdf/_events.py +13 -162
- cognite/neat/_graph/extractors/_classic_cdf/_files.py +15 -179
- cognite/neat/_graph/extractors/_classic_cdf/_labels.py +32 -100
- cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +27 -178
- cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +14 -139
- cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +15 -173
- cognite/neat/_graph/extractors/_rdf_file.py +6 -7
- cognite/neat/_graph/loaders/_rdf2dms.py +2 -2
- cognite/neat/_graph/queries/_base.py +17 -1
- cognite/neat/_graph/transformers/_classic_cdf.py +74 -147
- cognite/neat/_graph/transformers/_prune_graph.py +1 -1
- cognite/neat/_graph/transformers/_rdfpath.py +1 -1
- cognite/neat/_issues/_base.py +26 -17
- cognite/neat/_issues/errors/__init__.py +4 -2
- cognite/neat/_issues/errors/_external.py +7 -0
- cognite/neat/_issues/errors/_properties.py +2 -7
- cognite/neat/_issues/errors/_resources.py +1 -1
- cognite/neat/_issues/warnings/__init__.py +8 -0
- cognite/neat/_issues/warnings/_external.py +16 -0
- cognite/neat/_issues/warnings/_properties.py +16 -0
- cognite/neat/_issues/warnings/_resources.py +26 -2
- cognite/neat/_issues/warnings/user_modeling.py +4 -4
- cognite/neat/_rules/_constants.py +8 -11
- cognite/neat/_rules/analysis/_base.py +8 -4
- cognite/neat/_rules/exporters/_base.py +3 -4
- cognite/neat/_rules/exporters/_rules2dms.py +33 -46
- cognite/neat/_rules/importers/__init__.py +1 -3
- cognite/neat/_rules/importers/_base.py +1 -1
- cognite/neat/_rules/importers/_dms2rules.py +6 -29
- cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
- cognite/neat/_rules/importers/_rdf/_base.py +34 -11
- cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
- cognite/neat/_rules/importers/_rdf/_inference2rules.py +43 -35
- cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
- cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
- cognite/neat/_rules/models/__init__.py +1 -1
- cognite/neat/_rules/models/_base_rules.py +22 -12
- cognite/neat/_rules/models/dms/__init__.py +4 -2
- cognite/neat/_rules/models/dms/_exporter.py +45 -48
- cognite/neat/_rules/models/dms/_rules.py +20 -17
- cognite/neat/_rules/models/dms/_rules_input.py +52 -8
- cognite/neat/_rules/models/dms/_validation.py +391 -119
- cognite/neat/_rules/models/entities/_single_value.py +32 -4
- cognite/neat/_rules/models/information/__init__.py +2 -0
- cognite/neat/_rules/models/information/_rules.py +0 -67
- cognite/neat/_rules/models/information/_validation.py +9 -9
- cognite/neat/_rules/models/mapping/__init__.py +2 -3
- cognite/neat/_rules/models/mapping/_classic2core.py +36 -146
- cognite/neat/_rules/models/mapping/_classic2core.yaml +343 -0
- cognite/neat/_rules/transformers/__init__.py +2 -2
- cognite/neat/_rules/transformers/_converters.py +110 -11
- cognite/neat/_rules/transformers/_mapping.py +105 -30
- cognite/neat/_rules/transformers/_pipelines.py +1 -1
- cognite/neat/_rules/transformers/_verification.py +31 -3
- cognite/neat/_session/_base.py +24 -8
- cognite/neat/_session/_drop.py +35 -0
- cognite/neat/_session/_inspect.py +17 -5
- cognite/neat/_session/_mapping.py +39 -0
- cognite/neat/_session/_prepare.py +219 -23
- cognite/neat/_session/_read.py +49 -12
- cognite/neat/_session/_to.py +8 -5
- cognite/neat/_session/exceptions.py +4 -0
- cognite/neat/_store/_base.py +27 -24
- cognite/neat/_utils/rdf_.py +34 -5
- cognite/neat/_version.py +1 -1
- cognite/neat/_workflows/steps/lib/current/rules_exporter.py +5 -88
- cognite/neat/_workflows/steps/lib/current/rules_importer.py +3 -14
- cognite/neat/_workflows/steps/lib/current/rules_validator.py +6 -7
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/METADATA +3 -3
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/RECORD +87 -92
- cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
- cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
- cognite/neat/_rules/models/dms/_schema.py +0 -1101
- cognite/neat/_rules/models/mapping/_base.py +0 -131
- cognite/neat/_utils/cdf/loaders/__init__.py +0 -25
- cognite/neat/_utils/cdf/loaders/_base.py +0 -54
- cognite/neat/_utils/cdf/loaders/_data_modeling.py +0 -339
- cognite/neat/_utils/cdf/loaders/_ingestion.py +0 -167
- /cognite/neat/{_utils/cdf → _client/_api}/__init__.py +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/LICENSE +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/entry_points.txt +0 -0
|
@@ -1,131 +0,0 @@
|
|
|
1
|
-
from collections import Counter, defaultdict
|
|
2
|
-
from collections.abc import Iterator, MutableSequence, Sequence
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
from typing import Any, Generic, SupportsIndex, TypeVar, cast, get_args, overload
|
|
5
|
-
|
|
6
|
-
import pandas as pd
|
|
7
|
-
from pydantic import BaseModel, GetCoreSchemaHandler, field_validator
|
|
8
|
-
from pydantic_core import core_schema
|
|
9
|
-
from pydantic_core.core_schema import ValidationInfo
|
|
10
|
-
|
|
11
|
-
from cognite.neat._issues.errors import NeatValueError
|
|
12
|
-
from cognite.neat._rules.models._base_rules import ClassRef, PropertyRef
|
|
13
|
-
from cognite.neat._rules.models.entities import ClassEntity, Undefined
|
|
14
|
-
|
|
15
|
-
# Type of reference being mapped: either a class reference or a property reference.
T_Mapping = TypeVar("T_Mapping", bound=ClassRef | PropertyRef)


class Mapping(BaseModel, Generic[T_Mapping]):
    """A single source-to-destination mapping between two references of the same kind."""

    # The reference in the source rule set.
    source: T_Mapping
    # The corresponding reference in the destination rule set.
    destination: T_Mapping
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
class MappingList(list, MutableSequence[Mapping[T_Mapping]]):
    """A list of ``Mapping`` objects with pydantic validation and pandas helpers.

    The class is generic over the reference type (``ClassRef`` or ``PropertyRef``)
    and must always be parametrized, e.g. ``MappingList[PropertyRef]``.
    """

    @classmethod
    def __get_pydantic_core_schema__(cls, source: Any, handler: GetCoreSchemaHandler) -> core_schema.CoreSchema:
        # Pydantic calls this hook to build the validation schema for the field.
        # The type argument is required so items can be validated as the right ref type.
        if args := get_args(source):
            item_type = args[0]
        else:
            # Someone used MappingList without specifying the type
            raise TypeError("MappingList must be used with a type argument, e.g., MappingList[PropertyRef]")

        instance_schema = core_schema.is_instance_schema(cls)
        sequence_row_schema = handler.generate_schema(Sequence[item_type])  # type: ignore[valid-type]

        # Accept either an existing MappingList instance or any sequence that
        # validates as Sequence[item_type] (converted to a MappingList).
        non_instance_schema = core_schema.no_info_after_validator_function(MappingList, sequence_row_schema)
        return core_schema.union_schema([instance_schema, non_instance_schema])

    def as_destination_by_source(self) -> dict[T_Mapping, T_Mapping]:
        """Return a lookup of destination references keyed by source reference."""
        return {mapping.source: mapping.destination for mapping in self}

    def to_pandas(self, drop_na_columns: bool = True, include: list[str] | None = None) -> pd.DataFrame:
        """Converts the mapping list to a pandas DataFrame.

        Args:
            drop_na_columns: Drop columns where every value is NaN.
            include: If given, restrict the DataFrame to these columns.
        """
        df = pd.DataFrame([entity.model_dump() for entity in self])
        if drop_na_columns:
            df = df.dropna(axis=1, how="all")
        if include is not None:
            df = df[include]
        return df

    def _repr_html_(self) -> str:
        """Returns the HTML representation of the mapping list (for notebooks)."""
        return self.to_pandas(drop_na_columns=True)._repr_html_()  # type: ignore[operator]

    # Implemented to get correct type hints
    def __iter__(self) -> Iterator[Mapping[T_Mapping]]:
        return super().__iter__()

    @overload
    def __getitem__(self, index: SupportsIndex) -> Mapping[T_Mapping]: ...

    @overload
    def __getitem__(self, index: slice) -> "MappingList[T_Mapping]": ...

    def __getitem__(self, index: SupportsIndex | slice, /) -> "Mapping[T_Mapping] | MappingList[T_Mapping]":
        if isinstance(index, slice):
            # Slicing preserves the MappingList type instead of returning a plain list.
            return MappingList[T_Mapping](super().__getitem__(index))
        return super().__getitem__(index)
|
68
|
-
|
|
69
|
-
|
|
70
|
-
class RuleMapping(BaseModel):
    """Maps classes and properties from a source rule set to a destination rule set."""

    # Property-level mappings (source class/property -> destination class/property).
    properties: MappingList[PropertyRef]
    # Class-level mappings (source class -> destination class).
    classes: MappingList[ClassRef]

    @field_validator("properties", "classes", mode="before")
    def as_mapping_list(cls, value: Sequence[Any], info: ValidationInfo) -> Any:
        # Coerce a plain sequence (e.g. a list of dicts) into the correctly
        # parametrized MappingList before pydantic validates the field.
        if isinstance(value, Sequence) and not isinstance(value, MappingList):
            # Look up the field's annotation to recover the ref type argument.
            annotation = cast(type, cls.model_fields[info.field_name].annotation)  # type: ignore[index]
            ref_cls = get_args(annotation)[0]
            return annotation([Mapping[ref_cls].model_validate(item) for item in value])  # type: ignore[valid-type, index]
        return value

    @classmethod
    def load_spreadsheet(
        cls, path: str | Path, source_prefix: str | None = None, destination_prefix: str | None = None
    ) -> "RuleMapping":
        """Loads mapping from Excel spreadsheet.

        This method expects four columns in the spreadsheet. The first two columns are the source class and
        property, and the last two columns are the destination class and property. The method will create
        a mapping for each row in the spreadsheet.

        The class mapping will be inferred from the property mappings. If a source class has multiple
        destination classes, the most common destination class will be used.

        Args:
            path: Path to Excel spreadsheet.
            source_prefix: Default prefix for source classes.
            destination_prefix: Default prefix for destination classes.

        Returns:
            Mapping object.

        """
        df = pd.read_excel(path).dropna(axis=1, how="all")
        properties = MappingList[PropertyRef]()
        # Counts how often each destination class is used per source class,
        # so the class mapping can pick the most common destination.
        destination_classes_by_source: dict[ClassEntity, Counter[ClassEntity]] = defaultdict(Counter)
        for _, row in df.iterrows():
            if len(row) < 4:
                raise NeatValueError(f"Row {row} is not valid. Expected 4 columns, got {len(row)}")

            if any(pd.isna(row.iloc[:4])):
                # Skip rows with a missing value in any of the four mapping columns.
                continue
            source_class, source_property, destination_class, destination_property = row.iloc[:4]
            source_entity = ClassEntity.load(source_class, prefix=source_prefix or Undefined)
            destination_entity = ClassEntity.load(destination_class, prefix=destination_prefix or Undefined)
            properties.append(
                Mapping(
                    source=PropertyRef(Class=source_entity, Property=source_property),
                    destination=PropertyRef(Class=destination_entity, Property=destination_property),
                )
            )
            destination_classes_by_source[source_entity][destination_entity] += 1

        classes = MappingList[ClassRef]()
        for source_entity, destination_classes in destination_classes_by_source.items():
            # Most common destination class wins when a source maps to several.
            destination_entity = destination_classes.most_common(1)[0][0]
            classes.append(
                Mapping(source=ClassRef(Class=source_entity), destination=ClassRef(Class=destination_entity))
            )

        return cls(properties=properties, classes=classes)
|
|
@@ -1,25 +0,0 @@
|
|
|
1
|
-
# Public API of the CDF loaders package: re-export the resource loaders so
# callers can import them directly from `cognite.neat._utils.cdf.loaders`.
from ._base import ResourceLoader
from ._data_modeling import (
    ContainerLoader,
    DataModelingLoader,
    DataModelLoader,
    SpaceLoader,
    ViewLoader,
)
from ._ingestion import (
    RawDatabaseLoader,
    RawTableLoader,
    TransformationLoader,
)

__all__ = [
    "DataModelingLoader",
    "ContainerLoader",
    "DataModelLoader",
    "ResourceLoader",
    "SpaceLoader",
    "ViewLoader",
    "TransformationLoader",
    "RawTableLoader",
    "RawDatabaseLoader",
]
|
|
@@ -1,54 +0,0 @@
|
|
|
1
|
-
from abc import ABC, abstractmethod
|
|
2
|
-
from collections.abc import Sequence
|
|
3
|
-
from typing import Generic, TypeVar
|
|
4
|
-
|
|
5
|
-
from cognite.client import CogniteClient
|
|
6
|
-
from cognite.client.data_classes._base import (
|
|
7
|
-
T_CogniteResourceList,
|
|
8
|
-
T_WritableCogniteResource,
|
|
9
|
-
T_WriteClass,
|
|
10
|
-
WriteableCogniteResourceList,
|
|
11
|
-
)
|
|
12
|
-
from cognite.client.utils.useful_types import SequenceNotStr
|
|
13
|
-
|
|
14
|
-
from cognite.neat._shared import T_ID
|
|
15
|
-
|
|
16
|
-
# Read-format resource list type (the list returned by the CDF API).
T_WritableCogniteResourceList = TypeVar("T_WritableCogniteResourceList", bound=WriteableCogniteResourceList)


class ResourceLoader(
    ABC,
    Generic[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList],
):
    """Abstract CRUD interface for a single CDF resource type.

    Subclasses bind the generic parameters to a concrete resource (e.g. spaces,
    views) and implement create/retrieve/update/delete against the CDF API.
    """

    # Human-readable name of the resource type (e.g. "spaces"); set by subclasses.
    resource_name: str

    def __init__(self, client: CogniteClient) -> None:
        self.client = client

    @classmethod
    @abstractmethod
    def get_id(cls, item: T_WriteClass | T_WritableCogniteResource | dict | T_ID) -> T_ID:
        """Return the unique identifier of *item*, whatever form it is given in."""
        raise NotImplementedError

    @classmethod
    def get_ids(cls, items: Sequence[T_WriteClass | T_WritableCogniteResource]) -> list[T_ID]:
        """Return the identifier of each item in *items*."""
        return [cls.get_id(item) for item in items]

    @abstractmethod
    def create(self, items: Sequence[T_WriteClass]) -> T_WritableCogniteResourceList:
        """Create the given resources in CDF and return the created resources."""
        raise NotImplementedError

    @abstractmethod
    def retrieve(self, ids: SequenceNotStr[T_ID]) -> T_WritableCogniteResourceList:
        """Retrieve the resources with the given identifiers from CDF."""
        raise NotImplementedError

    @abstractmethod
    def update(self, items: Sequence[T_WriteClass]) -> T_WritableCogniteResourceList:
        """Update the given resources in CDF and return the updated resources."""
        raise NotImplementedError

    @abstractmethod
    def delete(self, ids: SequenceNotStr[T_ID]) -> list[T_ID]:
        """Delete the resources with the given identifiers and return the deleted ids."""
        raise NotImplementedError

    def are_equal(self, local: T_WriteClass, remote: T_WritableCogniteResource) -> bool:
        """Compare a local (write-format) resource with its remote counterpart."""
        return local == remote.as_write()
|
|
@@ -1,339 +0,0 @@
|
|
|
1
|
-
from collections.abc import Callable, Sequence
|
|
2
|
-
from graphlib import TopologicalSorter
|
|
3
|
-
from typing import Any, Literal, cast
|
|
4
|
-
|
|
5
|
-
from cognite.client import CogniteClient
|
|
6
|
-
from cognite.client.data_classes import filters
|
|
7
|
-
from cognite.client.data_classes._base import (
|
|
8
|
-
T_CogniteResourceList,
|
|
9
|
-
T_WritableCogniteResource,
|
|
10
|
-
T_WriteClass,
|
|
11
|
-
)
|
|
12
|
-
from cognite.client.data_classes.data_modeling import (
|
|
13
|
-
Container,
|
|
14
|
-
ContainerApply,
|
|
15
|
-
ContainerApplyList,
|
|
16
|
-
ContainerList,
|
|
17
|
-
DataModel,
|
|
18
|
-
DataModelApply,
|
|
19
|
-
DataModelApplyList,
|
|
20
|
-
DataModelList,
|
|
21
|
-
RequiresConstraint,
|
|
22
|
-
Space,
|
|
23
|
-
SpaceApply,
|
|
24
|
-
SpaceApplyList,
|
|
25
|
-
SpaceList,
|
|
26
|
-
View,
|
|
27
|
-
ViewApply,
|
|
28
|
-
ViewApplyList,
|
|
29
|
-
ViewList,
|
|
30
|
-
)
|
|
31
|
-
from cognite.client.data_classes.data_modeling.ids import (
|
|
32
|
-
ContainerId,
|
|
33
|
-
DataModelId,
|
|
34
|
-
NodeId,
|
|
35
|
-
ViewId,
|
|
36
|
-
)
|
|
37
|
-
from cognite.client.exceptions import CogniteAPIError
|
|
38
|
-
from cognite.client.utils.useful_types import SequenceNotStr
|
|
39
|
-
|
|
40
|
-
from ._base import T_ID, ResourceLoader, T_WritableCogniteResourceList
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
class DataModelingLoader(
    ResourceLoader[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList]
):
    """Base loader for data-modeling resources (spaces, views, containers, data models)."""

    @classmethod
    def in_space(cls, item: T_WriteClass | T_WritableCogniteResource | T_ID, space: set[str]) -> bool:
        """Return True if *item* belongs to one of the given spaces.

        Raises:
            ValueError: If the item has no ``space`` attribute.
        """
        if hasattr(item, "space"):
            return item.space in space
        raise ValueError(f"Item {item} does not have a space attribute")

    def sort_by_dependencies(self, items: list[T_WriteClass]) -> list[T_WriteClass]:
        # Default: no ordering constraints; subclasses with inter-item
        # dependencies (e.g. containers) override this.
        return items

    def _create_force(
        self,
        items: Sequence[T_WriteClass],
        tried_force_deploy: set[T_ID],
        create_method: Callable[[Sequence[T_WriteClass]], T_WritableCogniteResourceList],
    ) -> T_WritableCogniteResourceList:
        """Create items, force-redeploying (delete + recreate) those the API rejects.

        *tried_force_deploy* is mutated to record ids already redeployed once,
        so each id is force-redeployed at most one time.
        """
        try:
            return create_method(items)
        except CogniteAPIError as e:
            # Ids the API reported as failed in this attempt.
            failed_items = {failed.as_id() for failed in e.failed if hasattr(failed, "as_id")}
            to_redeploy = [
                item
                for item in items
                if item.as_id() in failed_items and item.as_id() not in tried_force_deploy  # type: ignore[attr-defined]
            ]
            if not to_redeploy:
                # Avoid infinite loop
                raise e
            ids = [item.as_id() for item in to_redeploy]  # type: ignore[attr-defined]
            tried_force_deploy.update(ids)
            self.delete(ids)
            # Recurse: the retry may itself fail on different items.
            return self._create_force(to_redeploy, tried_force_deploy, create_method)
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, SpaceList]):
    """Loader for CDF spaces; the space name itself is the identifier."""

    resource_name = "spaces"

    @classmethod
    def get_id(cls, item: Space | SpaceApply | str | dict) -> str:
        """Return the space name of *item*, whatever form it is given in."""
        if isinstance(item, Space | SpaceApply):
            return item.space
        if isinstance(item, dict):
            return item["space"]
        return item

    def create(self, items: Sequence[SpaceApply]) -> SpaceList:
        return self.client.data_modeling.spaces.apply(items)

    def retrieve(self, ids: SequenceNotStr[str]) -> SpaceList:
        return self.client.data_modeling.spaces.retrieve(ids)

    def update(self, items: Sequence[SpaceApply]) -> SpaceList:
        # Applying a space is an upsert, so update is the same as create.
        return self.create(items)

    def delete(self, ids: SequenceNotStr[str] | Sequence[Space | SpaceApply]) -> list[str]:
        # Accept Space/SpaceApply objects as well as plain space names.
        if all(isinstance(item, Space) for item in ids) or all(isinstance(item, SpaceApply) for item in ids):
            ids = [cast(Space | SpaceApply, item).space for item in ids]
        return self.client.data_modeling.spaces.delete(cast(SequenceNotStr[str], ids))

    def clean(self, space: str) -> None:
        """Deletes all data in a space, then the space itself.

        This means all nodes, edges, views, containers, and data models located in the given space.

        Args:
            space: The space to delete.

        """
        # Deletion order matters: instances first (edges before nodes),
        # then views, containers, data models, and finally the space.
        edges = self.client.data_modeling.instances.list(
            "edge", limit=-1, filter=filters.Equals(["edge", "space"], space)
        )
        if edges:
            instances = self.client.data_modeling.instances.delete(edges=edges.as_ids())
            print(f"Deleted {len(instances.edges)} edges")
        nodes = self.client.data_modeling.instances.list(
            "node", limit=-1, filter=filters.Equals(["node", "space"], space)
        )
        # Type nodes are deleted after the data nodes that reference them.
        node_types = {NodeId(node.type.space, node.type.external_id) for node in nodes if node.type}
        node_data = set(nodes.as_ids()) - node_types
        if node_data:
            instances = self.client.data_modeling.instances.delete(nodes=list(node_data))
            print(f"Deleted {len(instances.nodes)} nodes")
        if node_types:
            instances = self.client.data_modeling.instances.delete(nodes=list(node_types))
            print(f"Deleted {len(instances.nodes)} node types")
        views = self.client.data_modeling.views.list(limit=-1, space=space)
        if views:
            deleted_views = self.client.data_modeling.views.delete(views.as_ids())
            print(f"Deleted {len(deleted_views)} views")
        containers = self.client.data_modeling.containers.list(limit=-1, space=space)
        if containers:
            deleted_containers = self.client.data_modeling.containers.delete(containers.as_ids())
            print(f"Deleted {len(deleted_containers)} containers")
        if data_models := self.client.data_modeling.data_models.list(limit=-1, space=space):
            deleted_data_models = self.client.data_modeling.data_models.delete(data_models.as_ids())
            print(f"Deleted {len(deleted_data_models)} data models")
        deleted_space = self.client.data_modeling.spaces.delete(space)
        print(f"Deleted space {deleted_space}")
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
    """Loader for CDF views, with support for resolving parent (implements) views."""

    resource_name = "views"

    def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "skip", "update", "force"] = "fail"):
        super().__init__(client)
        # How to handle views that already exist; "force" deletes and recreates.
        self.existing_handling = existing_handling
        # Cache of retrieved views by id, used when resolving ancestors.
        self._cache_view_by_id: dict[ViewId, View] = {}
        # Ids already force-redeployed once; prevents endless delete/recreate cycles.
        self._tried_force_deploy: set[ViewId] = set()

    @classmethod
    def get_id(cls, item: View | ViewApply | ViewId | dict) -> ViewId:
        """Return the ViewId of *item*, whatever form it is given in."""
        if isinstance(item, View | ViewApply):
            return item.as_id()
        if isinstance(item, dict):
            return ViewId.load(item)
        return item

    def create(self, items: Sequence[ViewApply]) -> ViewList:
        if self.existing_handling == "force":
            return self._create_force(items, self._tried_force_deploy, self.client.data_modeling.views.apply)
        else:
            return self.client.data_modeling.views.apply(items)

    def retrieve(self, ids: SequenceNotStr[ViewId]) -> ViewList:
        return self.client.data_modeling.views.retrieve(cast(Sequence, ids))

    def update(self, items: Sequence[ViewApply]) -> ViewList:
        return self.create(items)

    def delete(self, ids: SequenceNotStr[ViewId]) -> list[ViewId]:
        return self.client.data_modeling.views.delete(cast(Sequence, ids))

    def _as_write_raw(self, view: View) -> dict[str, Any]:
        """Dump *view* in write format with properties inherited from parents removed."""
        dumped = view.as_write().dump()
        if view.properties:
            # All read version of views have all the properties of their parent views.
            # We need to remove these properties to compare with the local view.
            parents = self._retrieve_view_ancestors(view.implements or [], self._cache_view_by_id)
            for parent in parents:
                for prop_name in parent.properties.keys():
                    dumped["properties"].pop(prop_name, None)

        if "properties" in dumped and not dumped["properties"]:
            # All properties were removed, so we remove the properties key.
            dumped.pop("properties", None)
        return dumped

    def are_equal(self, local: ViewApply, remote: View) -> bool:
        """Compare a local view with the remote one, ignoring inherited properties."""
        local_dumped = local.dump()
        if not remote.implements:
            # No parents, so the plain write-format dumps are directly comparable.
            return local_dumped == remote.as_write().dump()

        cdf_resource_dumped = self._as_write_raw(remote)

        if "properties" in local_dumped and not local_dumped["properties"]:
            # In case the local properties are set to an empty dict.
            local_dumped.pop("properties", None)

        return local_dumped == cdf_resource_dumped

    def as_write(self, view: View) -> ViewApply:
        """Convert a read view to write format, excluding inherited properties."""
        return ViewApply.load(self._as_write_raw(view))

    def retrieve_all_parents(self, views: list[ViewId]) -> list[View]:
        """Retrieve every direct and transitive parent of the given views."""
        return self._retrieve_view_ancestors(views, self._cache_view_by_id)

    def _retrieve_view_ancestors(self, parents: list[ViewId], cache: dict[ViewId, View]) -> list[View]:
        """Retrieves all ancestors of a view.

        This will mutate the cache passed in, and return a list of views that are the ancestors
        of the views in the parents list.

        Args:
            parents: The parents of the view to retrieve all ancestors for
            cache: The cache to store the views in
        """
        # Breadth-first walk up the implements chain, one generation per iteration.
        parent_ids = parents.copy()
        found: list[View] = []
        found_ids: set[ViewId] = set()
        while parent_ids:
            to_lookup: set[ViewId] = set()
            grand_parent_ids = []
            for parent in parent_ids:
                if parent in found_ids:
                    continue
                elif parent in cache:
                    found.append(cache[parent])
                    grand_parent_ids.extend(cache[parent].implements or [])
                else:
                    to_lookup.add(parent)

            if to_lookup:
                # Fetch cache misses in one API call and record them.
                looked_up = self.client.data_modeling.views.retrieve(list(to_lookup))
                cache.update({view.as_id(): view for view in looked_up})
                found.extend(looked_up)
                found_ids.update({view.as_id() for view in looked_up})
                for view in looked_up:
                    grand_parent_ids.extend(view.implements or [])

            parent_ids = grand_parent_ids
        return found
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
class ContainerLoader(DataModelingLoader[ContainerId, ContainerApply, Container, ContainerApplyList, ContainerList]):
    """Loader for CDF containers, with dependency-aware ordering on create."""

    resource_name = "containers"

    def __init__(self, client: CogniteClient, existing_handling: Literal["fail", "skip", "update", "force"] = "fail"):
        super().__init__(client)
        # How to handle containers that already exist; "force" deletes and recreates.
        self.existing_handling = existing_handling
        # Ids already force-redeployed once; prevents endless delete/recreate cycles.
        self._tried_force_deploy: set[ContainerId] = set()

    @classmethod
    def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId:
        """Return the ContainerId of *item*, whatever form it is given in."""
        if isinstance(item, dict):
            return ContainerId.load(item)
        if isinstance(item, Container | ContainerApply):
            return item.as_id()
        return item

    def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]:
        """Order containers so each 'requires' dependency precedes its dependents."""
        by_id = {candidate.as_id(): candidate for candidate in items}
        graph: dict[ContainerId, set[ContainerId]] = {}
        for candidate in items:
            # Only dependencies within this batch matter for ordering.
            graph[candidate.as_id()] = {
                constraint.require
                for constraint in candidate.constraints.values()
                if isinstance(constraint, RequiresConstraint) and constraint.require in by_id
            }
        return [by_id[identifier] for identifier in TopologicalSorter(graph).static_order()]

    def create(self, items: Sequence[ContainerApply]) -> ContainerList:
        if self.existing_handling != "force":
            return self.client.data_modeling.containers.apply(items)
        return self._create_force(items, self._tried_force_deploy, self.client.data_modeling.containers.apply)

    def retrieve(self, ids: SequenceNotStr[ContainerId]) -> ContainerList:
        return self.client.data_modeling.containers.retrieve(cast(Sequence, ids))

    def update(self, items: Sequence[ContainerApply]) -> ContainerList:
        # Applying a container is an upsert, so update is the same as create.
        return self.create(items)

    def delete(self, ids: SequenceNotStr[ContainerId]) -> list[ContainerId]:
        return self.client.data_modeling.containers.delete(cast(Sequence, ids))

    def are_equal(self, local: ContainerApply, remote: Container) -> bool:
        """Compare local and remote, treating a missing usedFor as the CDF default "node"."""
        local_dumped = local.dump(camel_case=True)
        # Setting used_for to "node" as it is the default value in the CDF.
        local_dumped.setdefault("usedFor", "node")
        return local_dumped == remote.as_write().dump(camel_case=True)
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel, DataModelApplyList, DataModelList]):
    """Loader for CDF data models."""

    resource_name = "data_models"

    @classmethod
    def get_id(cls, item: DataModel | DataModelApply | DataModelId | dict) -> DataModelId:
        """Return the DataModelId of *item*, whatever form it is given in."""
        if isinstance(item, dict):
            return DataModelId.load(item)
        if isinstance(item, DataModel | DataModelApply):
            return item.as_id()
        return item

    def create(self, items: Sequence[DataModelApply]) -> DataModelList:
        return self.client.data_modeling.data_models.apply(items)

    def retrieve(self, ids: SequenceNotStr[DataModelId]) -> DataModelList:
        return self.client.data_modeling.data_models.retrieve(cast(Sequence, ids))

    def update(self, items: Sequence[DataModelApply]) -> DataModelList:
        # Applying a data model is an upsert, so update is the same as create.
        return self.create(items)

    def delete(self, ids: SequenceNotStr[DataModelId]) -> list[DataModelId]:
        return self.client.data_modeling.data_models.delete(cast(Sequence, ids))

    def are_equal(self, local: DataModelApply, remote: DataModel) -> bool:
        """Compare data models, ignoring view order and ViewId-vs-View representation.

        Data models that have the same views, but in different order, are considered equal.
        """

        def normalized_views(views) -> list:
            # Normalize every entry to a sorted list of id-tuples.
            return sorted((v if isinstance(v, ViewId) else v.as_id()).as_tuple() for v in views or [])

        local_dumped = local.dump()
        remote_dumped = remote.as_write().dump()
        local_dumped["views"] = normalized_views(local.views)
        remote_dumped["views"] = normalized_views(remote.views)
        return local_dumped == remote_dumped
|