cognite-neat 0.98.0__py3-none-any.whl → 0.99.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cognite-neat might be problematic.
- cognite/neat/_client/__init__.py +4 -0
- cognite/neat/_client/_api/data_modeling_loaders.py +585 -0
- cognite/neat/_client/_api/schema.py +111 -0
- cognite/neat/_client/_api_client.py +17 -0
- cognite/neat/_client/data_classes/__init__.py +0 -0
- cognite/neat/{_utils/cdf/data_classes.py → _client/data_classes/data_modeling.py} +8 -135
- cognite/neat/_client/data_classes/schema.py +495 -0
- cognite/neat/_constants.py +27 -4
- cognite/neat/_graph/_shared.py +14 -15
- cognite/neat/_graph/extractors/_classic_cdf/_assets.py +14 -154
- cognite/neat/_graph/extractors/_classic_cdf/_base.py +154 -7
- cognite/neat/_graph/extractors/_classic_cdf/_classic.py +25 -14
- cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +17 -92
- cognite/neat/_graph/extractors/_classic_cdf/_events.py +13 -162
- cognite/neat/_graph/extractors/_classic_cdf/_files.py +15 -179
- cognite/neat/_graph/extractors/_classic_cdf/_labels.py +32 -100
- cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +27 -178
- cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +14 -139
- cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +15 -173
- cognite/neat/_graph/extractors/_rdf_file.py +6 -7
- cognite/neat/_graph/loaders/_rdf2dms.py +2 -2
- cognite/neat/_graph/queries/_base.py +17 -1
- cognite/neat/_graph/transformers/_classic_cdf.py +74 -147
- cognite/neat/_graph/transformers/_prune_graph.py +1 -1
- cognite/neat/_graph/transformers/_rdfpath.py +1 -1
- cognite/neat/_issues/_base.py +26 -17
- cognite/neat/_issues/errors/__init__.py +4 -2
- cognite/neat/_issues/errors/_external.py +7 -0
- cognite/neat/_issues/errors/_properties.py +2 -7
- cognite/neat/_issues/errors/_resources.py +1 -1
- cognite/neat/_issues/warnings/__init__.py +8 -0
- cognite/neat/_issues/warnings/_external.py +16 -0
- cognite/neat/_issues/warnings/_properties.py +16 -0
- cognite/neat/_issues/warnings/_resources.py +26 -2
- cognite/neat/_issues/warnings/user_modeling.py +4 -4
- cognite/neat/_rules/_constants.py +8 -11
- cognite/neat/_rules/analysis/_base.py +8 -4
- cognite/neat/_rules/exporters/_base.py +3 -4
- cognite/neat/_rules/exporters/_rules2dms.py +33 -46
- cognite/neat/_rules/importers/__init__.py +1 -3
- cognite/neat/_rules/importers/_base.py +1 -1
- cognite/neat/_rules/importers/_dms2rules.py +6 -29
- cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
- cognite/neat/_rules/importers/_rdf/_base.py +34 -11
- cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
- cognite/neat/_rules/importers/_rdf/_inference2rules.py +43 -35
- cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
- cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
- cognite/neat/_rules/models/__init__.py +1 -1
- cognite/neat/_rules/models/_base_rules.py +22 -12
- cognite/neat/_rules/models/dms/__init__.py +4 -2
- cognite/neat/_rules/models/dms/_exporter.py +45 -48
- cognite/neat/_rules/models/dms/_rules.py +20 -17
- cognite/neat/_rules/models/dms/_rules_input.py +52 -8
- cognite/neat/_rules/models/dms/_validation.py +391 -119
- cognite/neat/_rules/models/entities/_single_value.py +32 -4
- cognite/neat/_rules/models/information/__init__.py +2 -0
- cognite/neat/_rules/models/information/_rules.py +0 -67
- cognite/neat/_rules/models/information/_validation.py +9 -9
- cognite/neat/_rules/models/mapping/__init__.py +2 -3
- cognite/neat/_rules/models/mapping/_classic2core.py +36 -146
- cognite/neat/_rules/models/mapping/_classic2core.yaml +343 -0
- cognite/neat/_rules/transformers/__init__.py +2 -2
- cognite/neat/_rules/transformers/_converters.py +110 -11
- cognite/neat/_rules/transformers/_mapping.py +105 -30
- cognite/neat/_rules/transformers/_pipelines.py +1 -1
- cognite/neat/_rules/transformers/_verification.py +31 -3
- cognite/neat/_session/_base.py +24 -8
- cognite/neat/_session/_drop.py +35 -0
- cognite/neat/_session/_inspect.py +17 -5
- cognite/neat/_session/_mapping.py +39 -0
- cognite/neat/_session/_prepare.py +219 -23
- cognite/neat/_session/_read.py +49 -12
- cognite/neat/_session/_to.py +8 -5
- cognite/neat/_session/exceptions.py +4 -0
- cognite/neat/_store/_base.py +27 -24
- cognite/neat/_utils/rdf_.py +34 -5
- cognite/neat/_version.py +1 -1
- cognite/neat/_workflows/steps/lib/current/rules_exporter.py +5 -88
- cognite/neat/_workflows/steps/lib/current/rules_importer.py +3 -14
- cognite/neat/_workflows/steps/lib/current/rules_validator.py +6 -7
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/METADATA +3 -3
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/RECORD +87 -92
- cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
- cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
- cognite/neat/_rules/models/dms/_schema.py +0 -1101
- cognite/neat/_rules/models/mapping/_base.py +0 -131
- cognite/neat/_utils/cdf/loaders/__init__.py +0 -25
- cognite/neat/_utils/cdf/loaders/_base.py +0 -54
- cognite/neat/_utils/cdf/loaders/_data_modeling.py +0 -339
- cognite/neat/_utils/cdf/loaders/_ingestion.py +0 -167
- /cognite/neat/{_utils/cdf → _client/_api}/__init__.py +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/LICENSE +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/entry_points.txt +0 -0
cognite/neat/_issues/warnings/_resources.py

@@ -24,6 +24,30 @@ class ResourceNotFoundWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]
     referred_type: str


+@dataclass(unsafe_hash=True)
+class ResourceNotDefinedWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
+    """The {resource_type} {identifier} is not defined in the {location}"""
+
+    extra = "{column_name} {row_number} in {sheet_name}"
+    fix = "Define the {resource_type} {identifier} in {location}."
+
+    location: str
+    column_name: str | None = None
+    row_number: int | None = None
+    sheet_name: str | None = None
+
+
+@dataclass(unsafe_hash=True)
+class ResourceRedefinedWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
+    """The {resource_type} {identifier} feature {feature} is being redefine from {current_value} to {new_value}.
+    This will be ignored."""
+
+    fix = "Avoid redefinition {resource_type} features"
+    feature: str
+    current_value: str
+    new_value: str
+
+
 @dataclass(unsafe_hash=True)
 class ResourcesDuplicatedWarning(NeatWarning, Generic[T_Identifier]):
     """Duplicated {resource_type} with identifiers {resources} were found. {default_action}"""

@@ -37,12 +61,12 @@ class ResourcesDuplicatedWarning(NeatWarning, Generic[T_Identifier]):

 @dataclass(unsafe_hash=True)
 class ResourceRetrievalWarning(NeatWarning, Generic[T_Identifier]):
-    """Failed to retrieve {resource_type} with
+    """Failed to retrieve {resource_type} with identifier(s) {resources}. Continuing without
     these resources."""

     extra = "The error was: {error}"

-    fix = "Check the error."
+    fix = "Check the error and fix accordingly."

     resources: frozenset[T_Identifier]
     resource_type: ResourceType
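The two warnings added above are plain dataclasses whose docstrings act as message templates. A minimal, hypothetical sketch of raising the new ResourceNotDefinedWarning; the identifier/resource_type fields are assumed to come from the ResourceNeatWarning base class, and the call site is illustrative (the existing ResourceRetrievalWarning is used with warnings.warn elsewhere in this diff, which is the pattern mirrored here):

```python
import warnings

# Defined in the module changed above; a re-export from cognite.neat._issues.warnings is assumed
# (that package's __init__ also changed in this release).
from cognite.neat._issues.warnings._resources import ResourceNotDefinedWarning

# Hypothetical call site: a spreadsheet row refers to a class that is never defined.
warnings.warn(
    ResourceNotDefinedWarning(
        identifier="WindTurbine",       # assumed base-class field
        resource_type="class",          # assumed base-class field
        location="Classes sheet",
        column_name="Class",
        row_number=42,
        sheet_name="Properties",
    ),
    stacklevel=2,
)
```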
cognite/neat/_issues/warnings/user_modeling.py

@@ -17,7 +17,7 @@ __all__ = [
     "HasDataFilterOnNoPropertiesViewWarning",
     "NodeTypeFilterOnParentViewWarning",
     "HasDataFilterOnViewWithReferencesWarning",
-    "
+    "ContainerPropertyLimitWarning",
     "NotNeatSupportedFilterWarning",
     "ParentInDifferentSpaceWarning",
 ]

@@ -89,15 +89,15 @@ class HasDataFilterOnViewWithReferencesWarning(UserModelingWarning):


 @dataclass(unsafe_hash=True)
-class
-    """The number of properties in the {
+class ContainerPropertyLimitWarning(UserModelingWarning):
+    """The number of properties in the {container_id} view is {count} which
     is more than the API limit {limit} properties.
     This can lead to performance issues.
     Reduce the number of properties in the view."""

     fix = "Reduce the number of properties in the view"

-
+    container_id: ContainerId
     count: int
     limit: int = DMS_CONTAINER_PROPERTY_SIZE_LIMIT

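ContainerPropertyLimitWarning replaces the earlier view-based warning and now carries a ContainerId plus the offending property count, with limit defaulting to DMS_CONTAINER_PROPERTY_SIZE_LIMIT. A hedged sketch of how a validator might emit it; the keyword-only construction and the absence of additional required base-class fields are assumptions:

```python
import warnings

from cognite.client.data_classes.data_modeling import ContainerId

from cognite.neat._issues.warnings.user_modeling import ContainerPropertyLimitWarning

container_id = ContainerId(space="my_space", external_id="Asset")
property_count = 120  # e.g. counted while validating DMS rules

# `limit` is a dataclass field with a default, so it is also readable as a class attribute.
if property_count > ContainerPropertyLimitWarning.limit:
    warnings.warn(
        ContainerPropertyLimitWarning(container_id=container_id, count=property_count),
        stacklevel=2,
    )
```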
cognite/neat/_rules/_constants.py

@@ -1,6 +1,7 @@
 import re
 import sys
 from functools import cached_property
+from typing import Literal

 if sys.version_info >= (3, 11):
     from enum import StrEnum

@@ -42,7 +43,7 @@ class EntityTypes(StrEnum):
     space = "space"


-def get_reserved_words() ->
+def get_reserved_words(key: Literal["class", "view", "property", "space"]) -> list[str]:
     return {
         "class": ["Class", "class"],
         "view": [

@@ -82,7 +83,7 @@ def get_reserved_words() -> dict[str, list[str]]:
             "extensions",
         ],
         "space": ["space", "cdf", "dms", "pg3", "shared", "system", "node", "edge"],
-    }
+    }[key]


 ENTITY_PATTERN = re.compile(r"^(?P<prefix>.*?):?(?P<suffix>[^(:]*)(\((?P<content>.+)\))?$")

@@ -93,25 +94,21 @@ MORE_THAN_ONE_NONE_ALPHANUMERIC_REGEX = r"([_-]{2,})"
 PREFIX_COMPLIANCE_REGEX = r"^([a-zA-Z]+)([a-zA-Z0-9]*[_-]{0,1}[a-zA-Z0-9_-]*)([a-zA-Z0-9]*)$"

 SPACE_COMPLIANCE_REGEX = (
-    rf"(?!^({'|'.join(get_reserved_words(
+    rf"(?!^({'|'.join(get_reserved_words('space'))})$)" r"(^[a-zA-Z][a-zA-Z0-9_-]{0,41}[a-zA-Z0-9]?$)"
 )


 DATA_MODEL_COMPLIANCE_REGEX = r"^[a-zA-Z]([a-zA-Z0-9_]{0,253}[a-zA-Z0-9])?$"

 VIEW_ID_COMPLIANCE_REGEX = (
-    rf"(?!^({'|'.join(get_reserved_words(
+    rf"(?!^({'|'.join(get_reserved_words('view'))})$)" r"(^[a-zA-Z][a-zA-Z0-9_]{0,253}[a-zA-Z0-9]?$)"
 )
 DMS_PROPERTY_ID_COMPLIANCE_REGEX = (
-    rf"(?!^({'|'.join(get_reserved_words(
-)
-CLASS_ID_COMPLIANCE_REGEX = (
-    rf"(?!^({'|'.join(get_reserved_words()['class'])})$)" r"(^[a-zA-Z][a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
+    rf"(?!^({'|'.join(get_reserved_words('property'))})$)" r"(^[a-zA-Z][a-zA-Z0-9_]{0,253}[a-zA-Z0-9]?$)"
 )
+CLASS_ID_COMPLIANCE_REGEX = rf"(?!^({'|'.join(get_reserved_words('class'))})$)" r"(^[a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"

-INFORMATION_PROPERTY_ID_COMPLIANCE_REGEX = (
-    r"^(\*)|(?!^(Property|property)$)(^[a-zA-Z][a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
-)
+INFORMATION_PROPERTY_ID_COMPLIANCE_REGEX = r"^(\*)|(?!^(Property|property)$)(^[a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
 VERSION_COMPLIANCE_REGEX = r"^[a-zA-Z0-9]([.a-zA-Z0-9_-]{0,41}[a-zA-Z0-9])?$"


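get_reserved_words now takes the category key directly and returns a flat list, so the compliance regexes above can inline a single category instead of indexing into a dict. A small check of the new call style, reusing the space regex exactly as it appears in the diff:

```python
import re

from cognite.neat._rules._constants import get_reserved_words

# The function is now keyed by category and returns a flat list of reserved words.
reserved_spaces = get_reserved_words("space")
assert "cdf" in reserved_spaces and "system" in reserved_spaces

# The compliance regexes embed the reserved words as a negative lookahead, e.g. for spaces:
space_pattern = re.compile(
    rf"(?!^({'|'.join(get_reserved_words('space'))})$)" r"(^[a-zA-Z][a-zA-Z0-9_-]{0,41}[a-zA-Z0-9]?$)"
)
assert space_pattern.match("my_space")       # valid space identifier
assert not space_pattern.match("cdf")        # reserved word is rejected
```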
cognite/neat/_rules/analysis/_base.py

@@ -109,14 +109,14 @@ class BaseAnalysis(ABC, Generic[T_Rules, T_Class, T_Property, T_ClassEntity, T_P
         raise NotImplementedError

     # Todo Lru cache this method.
-    def class_parent_pairs(self) -> dict[T_ClassEntity, list[T_ClassEntity]]:
+    def class_parent_pairs(self, allow_different_space: bool = False) -> dict[T_ClassEntity, list[T_ClassEntity]]:
         """This only returns class - parent pairs only if parent is in the same data model"""
         class_subclass_pairs: dict[T_ClassEntity, list[T_ClassEntity]] = {}
         for cls_ in self._get_classes():
             entity = self._get_cls_entity(cls_)
             class_subclass_pairs[entity] = []
             for parent in self._get_cls_parents(cls_) or []:
-                if parent.prefix == entity.prefix:
+                if parent.prefix == entity.prefix or allow_different_space:
                     class_subclass_pairs[entity].append(parent)
                 else:
                     warnings.warn(

@@ -126,11 +126,15 @@ class BaseAnalysis(ABC, Generic[T_Rules, T_Class, T_Property, T_ClassEntity, T_P

         return class_subclass_pairs

-    def classes_with_properties(
+    def classes_with_properties(
+        self, consider_inheritance: bool = False, allow_different_namespace: bool = False
+    ) -> dict[T_ClassEntity, list[T_Property]]:
         """Returns classes that have been defined in the data model.

         Args:
             consider_inheritance: Whether to consider inheritance or not. Defaults False
+            allow_different_namespace: When considering inheritance, whether to allow parents from
+                different namespaces or not. Defaults False

         Returns:
             Dictionary of classes with a list of properties defined for them

@@ -150,7 +154,7 @@ class BaseAnalysis(ABC, Generic[T_Rules, T_Class, T_Property, T_ClassEntity, T_P
                 class_property_pairs[self._get_cls_entity(property_)].append(property_)  # type: ignore

         if consider_inheritance:
-            class_parent_pairs = self.class_parent_pairs()
+            class_parent_pairs = self.class_parent_pairs(allow_different_namespace)
             for class_ in class_parent_pairs:
                 self._add_inherited_properties(class_, class_property_pairs, class_parent_pairs)

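The new flag is forwarded from classes_with_properties into class_parent_pairs, where it relaxes the same-prefix check on parents. A self-contained toy version of that filtering rule; the ClassEntity dataclass and function here are illustrative stand-ins, not the neat types:

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class ClassEntity:
    prefix: str
    suffix: str


def class_parent_pairs(
    parents_by_class: dict[ClassEntity, list[ClassEntity]], allow_different_space: bool = False
) -> dict[ClassEntity, list[ClassEntity]]:
    """Keep a parent only if it lives in the same prefix, unless explicitly allowed."""
    result: dict[ClassEntity, list[ClassEntity]] = {}
    for entity, parents in parents_by_class.items():
        result[entity] = [p for p in parents if p.prefix == entity.prefix or allow_different_space]
    return result


pairs = {ClassEntity("a", "Child"): [ClassEntity("a", "Base"), ClassEntity("b", "External")]}
# Default: the cross-namespace parent is dropped (and the real code warns about it).
assert class_parent_pairs(pairs) == {ClassEntity("a", "Child"): [ClassEntity("a", "Base")]}
# Opt-in: parents from other namespaces are kept for inheritance resolution.
assert class_parent_pairs(pairs, allow_different_space=True)[ClassEntity("a", "Child")] == [
    ClassEntity("a", "Base"),
    ClassEntity("b", "External"),
]
```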
cognite/neat/_rules/exporters/_base.py

@@ -3,8 +3,7 @@ from collections.abc import Iterable
 from pathlib import Path
 from typing import Generic, TypeVar

-from cognite.
-
+from cognite.neat._client import NeatClient
 from cognite.neat._rules._shared import T_VerifiedRules
 from cognite.neat._utils.auxiliary import class_html_doc
 from cognite.neat._utils.upload import UploadResult, UploadResultList

@@ -32,11 +31,11 @@ class BaseExporter(ABC, Generic[T_VerifiedRules, T_Export]):
 class CDFExporter(BaseExporter[T_VerifiedRules, T_Export]):
     @abstractmethod
     def export_to_cdf_iterable(
-        self, rules: T_VerifiedRules, client:
+        self, rules: T_VerifiedRules, client: NeatClient, dry_run: bool = False, fallback_one_by_one: bool = False
     ) -> Iterable[UploadResult]:
         raise NotImplementedError

     def export_to_cdf(
-        self, rules: T_VerifiedRules, client:
+        self, rules: T_VerifiedRules, client: NeatClient, dry_run: bool = False, fallback_one_by_one: bool = False
     ) -> UploadResultList:
         return UploadResultList(self.export_to_cdf_iterable(rules, client, dry_run, fallback_one_by_one))
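Both CDF-facing methods now take the new NeatClient wrapper and an explicit fallback_one_by_one flag, and export_to_cdf remains a thin wrapper that materializes export_to_cdf_iterable into an UploadResultList. A toy illustration of that base-class contract; ToyExporter and ResultList are stand-ins, not neat classes:

```python
from collections.abc import Iterable


class ResultList(list):
    """Stand-in for UploadResultList."""


class ToyExporter:
    def export_to_cdf_iterable(
        self, rules: object, client: object, dry_run: bool = False, fallback_one_by_one: bool = False
    ) -> Iterable[str]:
        # A real exporter uploads one resource kind at a time and yields an UploadResult per kind.
        yield f"spaces (dry_run={dry_run})"
        yield f"containers (fallback_one_by_one={fallback_one_by_one})"

    def export_to_cdf(
        self, rules: object, client: object, dry_run: bool = False, fallback_one_by_one: bool = False
    ) -> ResultList:
        # Mirrors CDFExporter.export_to_cdf: materialize the iterable into a result list.
        return ResultList(self.export_to_cdf_iterable(rules, client, dry_run, fallback_one_by_one))


print(ToyExporter().export_to_cdf(rules=None, client=None, dry_run=True))
```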
cognite/neat/_rules/exporters/_rules2dms.py

@@ -3,7 +3,6 @@ from collections.abc import Collection, Hashable, Iterable, Sequence
 from pathlib import Path
 from typing import Literal, TypeAlias, cast

-from cognite.client import CogniteClient
 from cognite.client.data_classes._base import CogniteResource, CogniteResourceList
 from cognite.client.data_classes.data_modeling import (
     ContainerApplyList,

@@ -15,23 +14,14 @@ from cognite.client.data_classes.data_modeling import (
 )
 from cognite.client.exceptions import CogniteAPIError

+from cognite.neat._client import DataModelingLoader, NeatClient
+from cognite.neat._client.data_classes.schema import DMSSchema
 from cognite.neat._issues import IssueList
 from cognite.neat._issues.warnings import (
     PrincipleOneModelOneSpaceWarning,
     ResourceRetrievalWarning,
 )
-from cognite.neat._rules.models.dms import DMSRules
-from cognite.neat._utils.cdf.loaders import (
-    ContainerLoader,
-    DataModelingLoader,
-    DataModelLoader,
-    RawDatabaseLoader,
-    RawTableLoader,
-    ResourceLoader,
-    SpaceLoader,
-    TransformationLoader,
-    ViewLoader,
-)
+from cognite.neat._rules.models.dms import DMSRules
 from cognite.neat._utils.upload import UploadResult

 from ._base import CDFExporter

@@ -49,8 +39,6 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
             If set, only export components in the given spaces. Defaults to None which means all spaces.
         existing_handling (Literal["fail", "skip", "update", "force"], optional): How to handle existing components.
             Defaults to "update". See below for details.
-        export_pipeline (bool, optional): Whether to export the pipeline. Defaults to False. This means setting
-            up transformations, RAW databases and tables to populate the data model.
         instance_space (str, optional): The space to use for the instance. Defaults to None.
         suppress_warnings (bool, optional): Suppress warnings. Defaults to False.

@@ -68,14 +56,12 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         export_components: Component | Collection[Component] = "all",
         include_space: set[str] | None = None,
         existing_handling: Literal["fail", "skip", "update", "force"] = "update",
-        export_pipeline: bool = False,
         instance_space: str | None = None,
         suppress_warnings: bool = False,
     ):
         self.export_components = {export_components} if isinstance(export_components, str) else set(export_components)
         self.include_space = include_space
         self.existing_handling = existing_handling
-        self.export_pipeline = export_pipeline
         self.instance_space = instance_space
         self.suppress_warnings = suppress_warnings
         self._schema: DMSSchema | None = None

@@ -116,17 +102,19 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         return exclude

     def export(self, rules: DMSRules) -> DMSSchema:
-
+        # We do not want to include CogniteCore/CogniteProcess Inudstries in the schema
+        return rules.as_schema(instance_space=self.instance_space, remove_cdf_spaces=True)

     def delete_from_cdf(
-        self, rules: DMSRules, client:
+        self, rules: DMSRules, client: NeatClient, dry_run: bool = False, skip_space: bool = False
     ) -> Iterable[UploadResult]:
-        to_export = self._prepare_exporters(rules
+        to_export = self._prepare_exporters(rules)

         # we need to reverse order in which we are picking up the items to delete
         # as they are sorted in the order of creation and we need to delete them in reverse order
-        for items
-
+        for items in reversed(to_export):
+            loader = client.loaders.get_loader(items)
+            if skip_space and isinstance(items, SpaceApplyList):
                 continue
             item_ids = loader.get_ids(items)
             existing_items = loader.retrieve(item_ids)

@@ -166,9 +154,9 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         )

     def export_to_cdf_iterable(
-        self, rules: DMSRules, client:
+        self, rules: DMSRules, client: NeatClient, dry_run: bool = False, fallback_one_by_one: bool = False
     ) -> Iterable[UploadResult]:
-        to_export = self._prepare_exporters(rules
+        to_export = self._prepare_exporters(rules)

         result_by_name = {}
         if self.existing_handling == "force":

@@ -176,17 +164,18 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
                 result_by_name[delete_result.name] = delete_result

         redeploy_data_model = False
-        for items
+        for items in to_export:
             # The conversion from DMS to GraphQL does not seem to be triggered even if the views
             # are changed. This is a workaround to force the conversion.
-            is_redeploying =
+            is_redeploying = isinstance(items, DataModelApplyList) and redeploy_data_model
+            loader = client.loaders.get_loader(items)

             to_create, to_delete, to_update, unchanged = self._categorize_items_for_upload(
                 loader, items, is_redeploying
             )

             issue_list = IssueList()
-            warning_list = self._validate(loader, items)
+            warning_list = self._validate(loader, items, client)
             issue_list.extend(warning_list)

             created: set[Hashable] = set()

@@ -206,6 +195,8 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
                     failed_changed.update(loader.get_id(item) for item in to_update)
                 else:
                     raise ValueError(f"Unsupported existing_handling {self.existing_handling}")
+                created.update(loader.get_id(item) for item in to_create)
+                deleted.update(loader.get_id(item) for item in to_delete)
             else:
                 if to_delete:
                     try:

@@ -226,7 +217,7 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
                     else:
                         deleted.update(loader.get_id(item) for item in to_delete)

-                if isinstance(
+                if isinstance(items, DataModelApplyList):
                     to_create = loader.sort_by_dependencies(to_create)

                 try:

@@ -292,11 +283,11 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
                 issues=issue_list,
             )

-            if
+            if isinstance(items, ViewApplyList) and (created or changed):
                 redeploy_data_model = True

     def _categorize_items_for_upload(
-        self, loader:
+        self, loader: DataModelingLoader, items: Sequence[CogniteResource], is_redeploying
     ) -> tuple[list[CogniteResource], list[CogniteResource], list[CogniteResource], list[CogniteResource]]:
         item_ids = loader.get_ids(items)
         cdf_items = loader.retrieve(item_ids)

@@ -304,7 +295,7 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         to_create, to_update, unchanged, to_delete = [], [], [], []
         for item in items:
             if (
-                isinstance(
+                isinstance(items, DataModelApplyList)
                 and self.include_space is not None
                 and not loader.in_space(item, self.include_space)
             ):

@@ -322,28 +313,24 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
                 to_update.append(item)
         return to_create, to_delete, to_update, unchanged

-    def _prepare_exporters(self, rules
+    def _prepare_exporters(self, rules: DMSRules) -> list[CogniteResourceList]:
         schema = self.export(rules)
-        to_export: list[
+        to_export: list[CogniteResourceList] = []
         if self.export_components.intersection({"all", "spaces"}):
-            to_export.append(
+            to_export.append(SpaceApplyList(schema.spaces.values()))
         if self.export_components.intersection({"all", "containers"}):
-            to_export.append(
+            to_export.append(ContainerApplyList(schema.containers.values()))
         if self.export_components.intersection({"all", "views"}):
-            to_export.append(
+            to_export.append(ViewApplyList(schema.views.values()))
         if self.export_components.intersection({"all", "data_models"}):
-            to_export.append(
-        if isinstance(schema, PipelineSchema):
-            to_export.append((schema.databases, RawDatabaseLoader(client)))
-            to_export.append((schema.raw_tables, RawTableLoader(client)))
-            to_export.append((schema.transformations, TransformationLoader(client)))
+            to_export.append(DataModelApplyList([schema.data_model]))
         return to_export

-    def _validate(self, loader:
+    def _validate(self, loader: DataModelingLoader, items: CogniteResourceList, client: NeatClient) -> IssueList:
         issue_list = IssueList()
-        if isinstance(
+        if isinstance(items, DataModelApplyList):
             models = cast(list[DataModelApply], items)
-            if other_models := self._exist_other_data_models(
+            if other_models := self._exist_other_data_models(client, models):
                 warning = PrincipleOneModelOneSpaceWarning(
                     f"There are multiple data models in the same space {models[0].space}. "
                     f"Other data models in the space are {other_models}.",

@@ -355,13 +342,13 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         return issue_list

     @classmethod
-    def _exist_other_data_models(cls,
+    def _exist_other_data_models(cls, client: NeatClient, models: list[DataModelApply]) -> list[DataModelId]:
         if not models:
             return []
         space = models[0].space
         external_id = models[0].external_id
         try:
-            data_models =
+            data_models = client.data_modeling.data_models.list(space=space, limit=25, all_versions=False)
         except CogniteAPIError as e:
             warnings.warn(ResourceRetrievalWarning(frozenset({space}), "space", str(e)), stacklevel=2)
             return []
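With the pipeline export removed, DMSExporter only produces spaces, containers, views and the data model, and all CDF calls go through NeatClient. A hedged usage sketch: the NeatClient(CogniteClient()) construction mirrors the NeatClient(client) call in the importer diff below, dms_rules is a placeholder for a verified DMSRules object, and the private import path is used because that is where the class is defined in this release:

```python
from cognite.client import CogniteClient

from cognite.neat._client import NeatClient
from cognite.neat._rules.exporters._rules2dms import DMSExporter  # a public re-export may also exist

dms_rules = ...  # placeholder: a verified DMSRules object, e.g. produced by DMSImporter + verification

# Note: the `export_pipeline` argument from 0.98.x is gone.
exporter = DMSExporter(export_components="all", existing_handling="update")
schema = exporter.export(dms_rules)  # builds a DMSSchema locally, excluding CDF-managed spaces

client = NeatClient(CogniteClient())  # assumption: NeatClient wraps a configured CogniteClient
for result in exporter.export_to_cdf_iterable(dms_rules, client, dry_run=True):
    print(result.name)
```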
cognite/neat/_rules/importers/__init__.py

@@ -1,9 +1,7 @@
 from ._base import BaseImporter
 from ._dms2rules import DMSImporter
 from ._dtdl2rules import DTDLImporter
-from ._rdf
-from ._rdf._inference2rules import InferenceImporter
-from ._rdf._owl2rules import OWLImporter
+from ._rdf import IMFImporter, InferenceImporter, OWLImporter
 from ._spreadsheet2rules import ExcelImporter, GoogleSheetImporter
 from ._yaml2rules import YAMLImporter

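With the _imf2rules and _owl2rules sub-packages collapsed into single modules, the RDF importers are re-exported from the _rdf package, so the public import path stays the same:

```python
from cognite.neat._rules.importers import DMSImporter, IMFImporter, InferenceImporter, OWLImporter
```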
cognite/neat/_rules/importers/_base.py

@@ -85,7 +85,7 @@ def _handle_issues(
     try:
         yield future_result
     except ValidationError as e:
-        issues.extend(error_cls.
+        issues.extend(error_cls.from_errors(e.errors(), **(error_args or {})))  # type: ignore[arg-type]
         future_result._result = "failure"
     else:
         future_result._result = "success"
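_handle_issues now routes pydantic validation errors through an error class's from_errors hook. A self-contained toy version of that pattern; ToyIssue and Metadata are illustrative, not neat classes:

```python
from pydantic import BaseModel, ValidationError


class ToyIssue:
    """Stand-in for a NeatIssue built from pydantic error dicts."""

    def __init__(self, loc: tuple, msg: str) -> None:
        self.loc, self.msg = loc, msg

    @classmethod
    def from_errors(cls, errors: list[dict]) -> list["ToyIssue"]:
        # Mirrors `error_cls.from_errors(e.errors(), ...)` in the hunk above.
        return [cls(tuple(err["loc"]), err["msg"]) for err in errors]


class Metadata(BaseModel):
    space: str
    version: str


issues: list[ToyIssue] = []
try:
    Metadata(space="my_space")  # missing "version" triggers a ValidationError
except ValidationError as e:
    issues.extend(ToyIssue.from_errors(e.errors()))

print([(issue.loc, issue.msg) for issue in issues])
```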
cognite/neat/_rules/importers/_dms2rules.py

@@ -4,7 +4,6 @@ from datetime import datetime, timezone
 from pathlib import Path
 from typing import Literal, cast

-from cognite.client import CogniteClient
 from cognite.client import data_modeling as dm
 from cognite.client.data_classes.data_modeling import DataModelId, DataModelIdentifier
 from cognite.client.data_classes.data_modeling.containers import BTreeIndex, InvertedIndex

@@ -19,6 +18,7 @@ from cognite.client.data_classes.data_modeling.views import (
 )
 from cognite.client.utils import ms_to_datetime

+from cognite.neat._client import NeatClient
 from cognite.neat._issues import IssueList, NeatIssue
 from cognite.neat._issues.errors import FileTypeUnexpectedError, ResourceMissingIdentifierError, ResourceRetrievalError
 from cognite.neat._issues.warnings import (

@@ -80,9 +80,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
         self.issue_list = IssueList(read_issues)
         self._all_containers_by_id = schema.containers.copy()
         self._all_views_by_id = schema.views.copy()
-        if schema.reference:
-            self._all_containers_by_id.update(schema.reference.containers.items())
-            self._all_views_by_id.update(schema.reference.views.items())

     def update_referenced_containers(self, containers: Iterable[dm.ContainerApply]) -> None:
         """Update the referenced containers. This is useful to add Cognite containers identified after the root schema

@@ -95,9 +92,8 @@ class DMSImporter(BaseImporter[DMSInputRules]):
     @classmethod
     def from_data_model_id(
         cls,
-        client:
+        client: NeatClient,
         data_model_id: DataModelIdentifier,
-        reference_model_id: DataModelIdentifier | None = None,
     ) -> "DMSImporter":
         """Create a DMSImporter ready to convert the given data model to rules.


@@ -111,7 +107,7 @@ class DMSImporter(BaseImporter[DMSInputRules]):
             DMSImporter: DMSImporter instance
         """

-        data_model_ids = [data_model_id
+        data_model_ids = [data_model_id]
         data_models = client.data_modeling.data_models.retrieve(data_model_ids, inline_views=True)

         user_models = cls._find_model_in_list(data_models, data_model_id)

@@ -128,34 +124,16 @@ class DMSImporter(BaseImporter[DMSInputRules]):
         )
         user_model = user_models.latest_version()

-        if reference_model_id:
-            ref_models = cls._find_model_in_list(data_models, reference_model_id)
-            if len(ref_models) == 0:
-                return cls(
-                    DMSSchema(),
-                    [
-                        ResourceRetrievalError(
-                            dm.DataModelId.load(reference_model_id),
-                            "data model",
-                            "Data Model is missing in CDF",
-                        )
-                    ],
-                )
-            ref_model: dm.DataModel[dm.View] | None = ref_models.latest_version()
-        else:
-            ref_model = None
-
         issue_list = IssueList()
         with _handle_issues(issue_list) as result:
-            schema =
+            schema = NeatClient(client).schema.retrieve_data_model(user_model)

         if result.result == "failure" or issue_list.has_errors:
             return cls(DMSSchema(), issue_list)

-        metadata = cls._create_metadata_from_model(user_model
-        ref_metadata = cls._create_metadata_from_model(ref_model) if ref_model else None
+        metadata = cls._create_metadata_from_model(user_model)

-        return cls(schema, issue_list, metadata,
+        return cls(schema, issue_list, metadata, None)

     @classmethod
     def _find_model_in_list(

@@ -174,7 +152,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
     def _create_metadata_from_model(
         cls,
         model: dm.DataModel[dm.View] | dm.DataModelApply,
-        has_reference: bool = False,
     ) -> DMSInputMetadata:
         description, creator = DMSInputMetadata._get_description_and_creator(model.description)

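from_data_model_id now expects a NeatClient and no longer takes reference_model_id; the schema is retrieved through client.schema.retrieve_data_model. A hedged sketch of the new call, with an illustrative data model identifier and the zero-argument to_rules() assumed from the base importer API:

```python
from cognite.client import CogniteClient

from cognite.neat._client import NeatClient
from cognite.neat._rules.importers import DMSImporter

client = NeatClient(CogniteClient())  # assumption: NeatClient wraps a configured CogniteClient

# 0.99.x: only the target data model is retrieved; the reference_model_id argument is gone.
importer = DMSImporter.from_data_model_id(client, ("my_space", "MyDataModel", "v1"))
read_rules = importer.to_rules()  # issues collected during retrieval end up on the read result
```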
cognite/neat/_rules/importers/_rdf/_base.py

@@ -1,13 +1,16 @@
+from datetime import datetime
 from pathlib import Path

 from cognite.client import data_modeling as dm
-from rdflib import
+from rdflib import Graph, Namespace, URIRef

+from cognite.neat._constants import get_default_prefixes
 from cognite.neat._issues import IssueList
 from cognite.neat._issues.errors import FileReadError
 from cognite.neat._issues.errors._general import NeatValueError
 from cognite.neat._rules._shared import ReadRules
 from cognite.neat._rules.importers._base import BaseImporter
+from cognite.neat._rules.models._base_rules import RoleTypes
 from cognite.neat._rules.models.data_types import AnyURI
 from cognite.neat._rules.models.entities import UnknownEntity
 from cognite.neat._rules.models.information import (

@@ -30,6 +33,13 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
         graph: Knowledge graph
         data_model_id: Data model id to be used for the imported rules
         space: CDF Space to be used for the imported rules
+        language: Language for description and human readable entity names
+
+
+
+    !!! note "Language"
+        Language is provided as ISO 639-1 code. If not provided, English will be used as default.
+
     """

     def __init__(

@@ -39,6 +49,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
         data_model_id: dm.DataModelId | tuple[str, str, str],
         max_number_of_instance: int,
         non_existing_node_type: UnknownEntity | AnyURI,
+        language: str,
     ) -> None:
         self.issue_list = issue_list
         self.graph = graph

@@ -48,6 +59,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):

         self.max_number_of_instance = max_number_of_instance
         self.non_existing_node_type = non_existing_node_type
+        self.language = language

     @classmethod
     def from_graph_store(

@@ -56,6 +68,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_RDF_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
         non_existing_node_type: UnknownEntity | AnyURI = DEFAULT_NON_EXISTING_NODE_TYPE,
+        language: str = "en",
     ):
         return cls(
             IssueList(title=f"{cls.__name__} issues"),

@@ -63,6 +76,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
             data_model_id=data_model_id,
             max_number_of_instance=max_number_of_instance,
             non_existing_node_type=non_existing_node_type,
+            language=language,
         )

     @classmethod

@@ -72,6 +86,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
         data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_RDF_DATA_MODEL_ID,
         max_number_of_instance: int = -1,
         non_existing_node_type: UnknownEntity | AnyURI = DEFAULT_NON_EXISTING_NODE_TYPE,
+        language: str = "en",
     ):
         issue_list = IssueList(title=f"{cls.__name__} issues")

@@ -82,15 +97,8 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
             issue_list.append(FileReadError(filepath, str(e)))

         # bind key namespaces
-
-
-        graph.bind("rdfs", RDFS)
-        graph.bind("dcterms", DCTERMS)
-        graph.bind("dc", DC)
-        graph.bind("skos", SKOS)
-        graph.bind("sh", SH)
-        graph.bind("xsd", XSD)
-        graph.bind("imf", "http://ns.imfid.org/imf#")
+        for prefix, namespace in get_default_prefixes().items():
+            graph.bind(prefix, namespace)

         return cls(
             issue_list,

@@ -98,6 +106,7 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
             data_model_id=data_model_id,
             max_number_of_instance=max_number_of_instance,
             non_existing_node_type=non_existing_node_type,
+            language=language,
         )

     def to_rules(

@@ -129,4 +138,18 @@ class BaseRDFImporter(BaseImporter[InformationInputRules]):
             prefixes: Dict of prefixes and namespaces
         """
         if Namespace(get_namespace(URI)) not in prefixes.values():
-            prefixes[f"
+            prefixes[f"prefix_{len(prefixes)+1}"] = Namespace(get_namespace(URI))
+
+    @property
+    def _metadata(self) -> dict:
+        return {
+            "role": RoleTypes.information,
+            "space": self.data_model_id.space,
+            "external_id": self.data_model_id.external_id,
+            "version": self.data_model_id.version,
+            "created": datetime.now().replace(microsecond=0),
+            "updated": datetime.now().replace(microsecond=0),
+            "name": None,
+            "description": f"Data model imported using {type(self).__name__}",
+            "creator": "Neat",
+        }