cognite-neat 0.88.1__py3-none-any.whl → 0.88.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cognite/neat/_version.py +1 -1
- cognite/neat/graph/__init__.py +0 -3
- cognite/neat/graph/loaders/_base.py +6 -6
- cognite/neat/graph/loaders/_rdf2asset.py +28 -31
- cognite/neat/graph/loaders/_rdf2dms.py +24 -15
- cognite/neat/issues/__init__.py +14 -0
- cognite/neat/issues/_base.py +415 -0
- cognite/neat/issues/errors/__init__.py +72 -0
- cognite/neat/issues/errors/_external.py +67 -0
- cognite/neat/issues/errors/_general.py +28 -0
- cognite/neat/issues/errors/_properties.py +62 -0
- cognite/neat/issues/errors/_resources.py +111 -0
- cognite/neat/issues/errors/_workflow.py +36 -0
- cognite/neat/{rules/issues → issues}/formatters.py +10 -10
- cognite/neat/issues/warnings/__init__.py +66 -0
- cognite/neat/issues/warnings/_external.py +40 -0
- cognite/neat/issues/warnings/_general.py +29 -0
- cognite/neat/issues/warnings/_models.py +92 -0
- cognite/neat/issues/warnings/_properties.py +44 -0
- cognite/neat/issues/warnings/_resources.py +55 -0
- cognite/neat/issues/warnings/user_modeling.py +113 -0
- cognite/neat/rules/_shared.py +10 -2
- cognite/neat/rules/exporters/_base.py +6 -6
- cognite/neat/rules/exporters/_rules2dms.py +19 -11
- cognite/neat/rules/exporters/_rules2excel.py +4 -4
- cognite/neat/rules/exporters/_rules2ontology.py +74 -51
- cognite/neat/rules/exporters/_rules2yaml.py +3 -3
- cognite/neat/rules/exporters/_validation.py +11 -96
- cognite/neat/rules/importers/__init__.py +7 -3
- cognite/neat/rules/importers/_base.py +9 -13
- cognite/neat/rules/importers/_dms2rules.py +42 -24
- cognite/neat/rules/importers/_dtdl2rules/dtdl_converter.py +49 -53
- cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py +31 -23
- cognite/neat/rules/importers/_dtdl2rules/spec.py +7 -0
- cognite/neat/rules/importers/_rdf/_imf2rules/__init__.py +3 -0
- cognite/neat/rules/importers/_rdf/_imf2rules/_imf2classes.py +82 -0
- cognite/neat/rules/importers/_rdf/_imf2rules/_imf2metadata.py +34 -0
- cognite/neat/rules/importers/_rdf/_imf2rules/_imf2properties.py +123 -0
- cognite/neat/rules/importers/{_owl2rules/_owl2rules.py → _rdf/_imf2rules/_imf2rules.py} +24 -18
- cognite/neat/rules/importers/{_inference2rules.py → _rdf/_inference2rules.py} +9 -9
- cognite/neat/rules/importers/_rdf/_owl2rules/_owl2classes.py +58 -0
- cognite/neat/rules/importers/_rdf/_owl2rules/_owl2metadata.py +68 -0
- cognite/neat/rules/importers/_rdf/_owl2rules/_owl2properties.py +60 -0
- cognite/neat/rules/importers/_rdf/_owl2rules/_owl2rules.py +76 -0
- cognite/neat/rules/importers/_rdf/_shared.py +586 -0
- cognite/neat/rules/importers/_spreadsheet2rules.py +35 -22
- cognite/neat/rules/importers/_yaml2rules.py +23 -21
- cognite/neat/rules/models/_constants.py +2 -1
- cognite/neat/rules/models/_rdfpath.py +4 -4
- cognite/neat/rules/models/_types/_field.py +9 -11
- cognite/neat/rules/models/asset/_rules.py +1 -3
- cognite/neat/rules/models/asset/_validation.py +14 -10
- cognite/neat/rules/models/dms/_converter.py +2 -4
- cognite/neat/rules/models/dms/_exporter.py +30 -8
- cognite/neat/rules/models/dms/_rules.py +23 -7
- cognite/neat/rules/models/dms/_schema.py +94 -62
- cognite/neat/rules/models/dms/_validation.py +105 -66
- cognite/neat/rules/models/entities.py +3 -0
- cognite/neat/rules/models/information/_converter.py +2 -2
- cognite/neat/rules/models/information/_rules.py +7 -8
- cognite/neat/rules/models/information/_validation.py +48 -25
- cognite/neat/rules/transformers/__init__.py +0 -0
- cognite/neat/rules/transformers/_base.py +15 -0
- cognite/neat/utils/auxiliary.py +2 -35
- cognite/neat/utils/text.py +17 -0
- cognite/neat/workflows/base.py +4 -4
- cognite/neat/workflows/cdf_store.py +3 -3
- cognite/neat/workflows/steps/data_contracts.py +1 -1
- cognite/neat/workflows/steps/lib/current/graph_extractor.py +3 -3
- cognite/neat/workflows/steps/lib/current/graph_loader.py +2 -2
- cognite/neat/workflows/steps/lib/current/graph_store.py +1 -1
- cognite/neat/workflows/steps/lib/current/rules_exporter.py +10 -10
- cognite/neat/workflows/steps/lib/current/rules_importer.py +78 -6
- cognite/neat/workflows/steps/lib/current/rules_validator.py +20 -9
- cognite/neat/workflows/steps/lib/io/io_steps.py +5 -5
- cognite/neat/workflows/steps_registry.py +4 -5
- {cognite_neat-0.88.1.dist-info → cognite_neat-0.88.3.dist-info}/METADATA +1 -1
- {cognite_neat-0.88.1.dist-info → cognite_neat-0.88.3.dist-info}/RECORD +86 -77
- cognite/neat/exceptions.py +0 -145
- cognite/neat/graph/exceptions.py +0 -90
- cognite/neat/graph/issues/loader.py +0 -104
- cognite/neat/issues.py +0 -158
- cognite/neat/rules/importers/_owl2rules/_owl2classes.py +0 -215
- cognite/neat/rules/importers/_owl2rules/_owl2metadata.py +0 -209
- cognite/neat/rules/importers/_owl2rules/_owl2properties.py +0 -203
- cognite/neat/rules/issues/__init__.py +0 -26
- cognite/neat/rules/issues/base.py +0 -82
- cognite/neat/rules/issues/dms.py +0 -683
- cognite/neat/rules/issues/fileread.py +0 -197
- cognite/neat/rules/issues/importing.py +0 -423
- cognite/neat/rules/issues/ontology.py +0 -298
- cognite/neat/rules/issues/spreadsheet.py +0 -563
- cognite/neat/rules/issues/spreadsheet_file.py +0 -151
- cognite/neat/rules/issues/tables.py +0 -72
- cognite/neat/workflows/_exceptions.py +0 -41
- /cognite/neat/{graph/issues → rules/importers/_rdf}/__init__.py +0 -0
- /cognite/neat/rules/importers/{_owl2rules → _rdf/_owl2rules}/__init__.py +0 -0
- /cognite/neat/{graph/stores → store}/__init__.py +0 -0
- /cognite/neat/{graph/stores → store}/_base.py +0 -0
- /cognite/neat/{graph/stores → store}/_provenance.py +0 -0
- {cognite_neat-0.88.1.dist-info → cognite_neat-0.88.3.dist-info}/LICENSE +0 -0
- {cognite_neat-0.88.1.dist-info → cognite_neat-0.88.3.dist-info}/WHEEL +0 -0
- {cognite_neat-0.88.1.dist-info → cognite_neat-0.88.3.dist-info}/entry_points.txt +0 -0
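
The manifest above shows the error and warning definitions being consolidated out of cognite/neat/rules/issues, cognite/neat/graph/issues, and cognite/neat/exceptions.py into a single top-level cognite/neat/issues package. A minimal sketch of the resulting import change, using only names that appear in the hunks below (whether additional classes are re-exported from the new package is not verified here):

    # 0.88.1-style import, removed in this release:
    # from cognite.neat.rules.issues import IssueList, ValidationIssue

    # 0.88.3-style imports, as used by the updated importers:
    from cognite.neat.issues import IssueList, NeatIssue
    from cognite.neat.issues.errors import ResourceRetrievalError
    from cognite.neat.issues.warnings import PropertyNotFoundWarning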

cognite/neat/rules/importers/_dms2rules.py

@@ -18,9 +18,14 @@ from cognite.client.data_classes.data_modeling.views import (
 )
 from cognite.client.utils import ms_to_datetime

-from cognite.neat.
-from cognite.neat.
-from cognite.neat.
+from cognite.neat.issues import IssueList, NeatIssue
+from cognite.neat.issues.errors import FileTypeUnexpectedError, ResourceMissingIdentifierError, ResourceRetrievalError
+from cognite.neat.issues.warnings import (
+    PropertyNotFoundWarning,
+    PropertyTypeNotSupportedWarning,
+    ResourceNotFoundWarning,
+)
+from cognite.neat.rules.importers._base import BaseImporter, VerifiedRules, _handle_issues
 from cognite.neat.rules.models import (
     DataModelType,
     DMSRules,
@@ -60,7 +65,7 @@ class DMSImporter(BaseImporter):
     def __init__(
         self,
         schema: DMSSchema,
-        read_issues: Sequence[
+        read_issues: Sequence[NeatIssue] | None = None,
         metadata: DMSMetadata | None = None,
         ref_metadata: DMSMetadata | None = None,
     ):
@@ -100,14 +105,28 @@ class DMSImporter(BaseImporter):

         user_models = cls._find_model_in_list(data_models, data_model_id)
         if len(user_models) == 0:
-            return cls(
+            return cls(
+                DMSSchema(),
+                [
+                    ResourceRetrievalError(
+                        dm.DataModelId.load(reference_model_id),  # type: ignore[arg-type]
+                        "data model",
+                        "Data Model is missing in CDF",
+                    )
+                ],
+            )
         user_model = user_models.latest_version()

         if reference_model_id:
             ref_models = cls._find_model_in_list(data_models, reference_model_id)
             if len(ref_models) == 0:
                 return cls(
-                    DMSSchema(),
+                    DMSSchema(),
+                    [
+                        ResourceRetrievalError(
+                            dm.DataModelId.load(reference_model_id), "data model", "Data Model is missing in CDF"
+                        )
+                    ],
                 )
             ref_model: dm.DataModel[dm.View] | None = ref_models.latest_version()
         else:
@@ -178,29 +197,29 @@ class DMSImporter(BaseImporter):
     @classmethod
     def from_zip_file(cls, zip_file: str | Path) -> "DMSImporter":
         if Path(zip_file).suffix != ".zip":
-            return cls(DMSSchema(), [
+            return cls(DMSSchema(), [FileTypeUnexpectedError(Path(zip_file), frozenset([".zip"]))])
         issue_list = IssueList()
         with _handle_issues(issue_list) as _:
             schema = DMSSchema.from_zip(zip_file)
         return cls(schema, issue_list)

     @overload
-    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) ->
+    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) -> VerifiedRules: ...

     @overload
     def to_rules(
         self, errors: Literal["continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[
+    ) -> tuple[VerifiedRules | None, IssueList]: ...

     def to_rules(
         self, errors: Literal["raise", "continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[
+    ) -> tuple[VerifiedRules | None, IssueList] | VerifiedRules:
         if self.issue_list.has_errors:
             # In case there were errors during the import, the to_rules method will return None
             return self._return_or_raise(self.issue_list, errors)

         if not self.root_schema.data_model:
-            self.issue_list.append(
+            self.issue_list.append(ResourceMissingIdentifierError("data model", type(self.root_schema).__name__))
             return self._return_or_raise(self.issue_list, errors)
         model = self.root_schema.data_model
         with _handle_issues(
@@ -301,10 +320,11 @@ class DMSImporter(BaseImporter):
     ) -> DMSProperty | None:
         if isinstance(prop, dm.MappedPropertyApply) and prop.container not in self._all_containers_by_id:
             self.issue_list.append(
-
-
-
-
+                ResourceNotFoundWarning[dm.ContainerId, dm.PropertyId](
+                    dm.ContainerId.load(prop.container),
+                    "container",
+                    view_entity.to_property_id(prop_id),
+                    "view property",
                 )
             )
             return None
@@ -313,11 +333,7 @@ class DMSImporter(BaseImporter):
             and prop.container_property_identifier not in self._all_containers_by_id[prop.container].properties
         ):
             self.issue_list.append(
-
-                    view_id=str(view_entity),
-                    property_=prop_id,
-                    container_id=str(ContainerEntity.from_id(prop.container)),
-                )
+                PropertyNotFoundWarning(prop.container, "container", prop_id, view_entity.as_id(), "view"),
             )
             return None
         if not isinstance(
@@ -329,7 +345,7 @@ class DMSImporter(BaseImporter):
             | MultiReverseDirectRelationApply,
         ):
             self.issue_list.append(
-
+                PropertyTypeNotSupportedWarning[dm.ViewId](view_entity.as_id(), "view", prop_id, type(prop).__name__)
             )
             return None

@@ -394,7 +410,9 @@ class DMSImporter(BaseImporter):
             else:
                 return DataType.load(container_prop.type._type)
         else:
-            self.issue_list.append(
+            self.issue_list.append(
+                PropertyTypeNotSupportedWarning[dm.ViewId](view_entity.as_id(), "view", prop_id, type(prop).__name__)
+            )
             return None

     def _get_nullable(self, prop: ViewPropertyApply) -> bool | None:
@@ -453,8 +471,8 @@ class DMSImporter(BaseImporter):
                 continue
             else:
                 self.issue_list.append(
-
-
+                    PropertyTypeNotSupportedWarning[dm.ContainerId](
+                        prop.container, "container", prop_id, type(constraint_obj).__name__
                    )
                 )
         return unique_constraints or None
cognite/neat/rules/importers/_dtdl2rules/dtdl_converter.py

@@ -1,14 +1,20 @@
 from collections import Counter
 from collections.abc import Callable, Sequence

-
-from cognite.neat.
+from cognite.neat.issues import IssueList, NeatIssue
+from cognite.neat.issues.errors import (
+    PropertyTypeNotSupportedError,
+    ResourceMissingIdentifierError,
+    ResourceNotFoundError,
+)
+from cognite.neat.issues.warnings import PropertyTypeNotSupportedWarning, ResourceTypeNotSupportedWarning
 from cognite.neat.rules.importers._dtdl2rules.spec import (
     DTMI,
     Command,
     CommandV2,
     Component,
     DTDLBase,
+    DTDLBaseWithName,
     Enum,
     Interface,
     Object,
@@ -19,15 +25,14 @@ from cognite.neat.rules.importers._dtdl2rules.spec import (
     Telemetry,
     TelemetryV2,
 )
-from cognite.neat.rules.issues import IssueList, ValidationIssue
 from cognite.neat.rules.models.data_types import _DATA_TYPE_BY_NAME, DataType, Json, String
 from cognite.neat.rules.models.entities import ClassEntity
 from cognite.neat.rules.models.information import InformationClass, InformationProperty


 class _DTDLConverter:
-    def __init__(self, issues: list[
-        self.issues
+    def __init__(self, issues: list[NeatIssue] | None = None) -> None:
+        self.issues = IssueList(issues or [])
         self.properties: list[InformationProperty] = []
         self.classes: list[InformationClass] = []
         self._item_by_id: dict[DTMI, DTDLBase] = {}
@@ -75,11 +80,10 @@ class _DTDLConverter:
             convert_method(item, parent)
         else:
             self.issues.append(
-
-
-
-
-                )
+                ResourceTypeNotSupportedWarning(
+                    item.identifier_with_fallback,
+                    item.type,
+                ),
             )

     def convert_interface(self, item: Interface, _: str | None) -> None:
@@ -94,11 +98,11 @@ class _DTDLConverter:
         for sub_item_or_id in item.contents or []:
             if isinstance(sub_item_or_id, DTMI) and sub_item_or_id not in self._item_by_id:
                 self.issues.append(
-
-
-
-
-
+                    PropertyTypeNotSupportedWarning(
+                        item.id_.model_dump() or item.display_name or "missing",
+                        item.type,
+                        sub_item_or_id.path[-1],
+                        ".".join(sub_item_or_id.path),
                     )
                 )
             elif isinstance(sub_item_or_id, DTMI):
@@ -130,12 +134,13 @@ class _DTDLConverter:
         )
         self.properties.append(prop)

-    def _missing_parent_warning(self, item):
+    def _missing_parent_warning(self, item: DTDLBaseWithName):
         self.issues.append(
-
-
-
-
+            ResourceNotFoundError(
+                "UNKNOWN",
+                "parent",
+                item.identifier_with_fallback,
+                item.type,
             )
         )

@@ -151,22 +156,15 @@ class _DTDLConverter:
             return None
         if item.request is None:
             self.issues.append(
-
-
-
-
-                    instance_id=item.id_.model_dump() if item.id_ else None,
-                )
+                ResourceTypeNotSupportedWarning[str](
+                    item.identifier_with_fallback,
+                    f"{item.type}.request",
+                ),
             )
             return None
         if item.response is not None:
             # Currently, we do not know how to handle response
-            self.issues.append(
-                issues.importing.IgnoredComponentWarning(
-                    identifier=f"{parent}.response",
-                    reason="Neat does not have a concept of response for commands. This will be ignored.",
-                )
-            )
+            self.issues.append(ResourceTypeNotSupportedWarning[str](f"{parent}.response", "Command.Response"))
         value_type = self.schema_to_value_type(item.request.schema_, item)
         if value_type is None:
             return
@@ -213,10 +211,9 @@ class _DTDLConverter:
         else:
             # Falling back to json
             self.issues.append(
-
-
-
-                    instance_id=item.target.model_dump(),
+                ResourceMissingIdentifierError(
+                    "unknown",
+                    item.target.model_dump(),
                 )
             )
             value_type = Json()
@@ -239,9 +236,9 @@ class _DTDLConverter:
     def convert_object(self, item: Object, _: str | None) -> None:
         if item.id_ is None:
             self.issues.append(
-
-
-
+                ResourceMissingIdentifierError(
+                    resource_type=item.type,
+                    name=item.display_name,
                 )
             )
             return None
@@ -280,21 +277,20 @@ class _DTDLConverter:
             return _DATA_TYPE_BY_NAME[input_type.casefold()]()
         elif isinstance(input_type, str):
             self.issues.append(
-
-
-
-
-
-                    instance_id=item.id_.model_dump() if item.id_ else None,
+                PropertyTypeNotSupportedError(
+                    item.identifier_with_fallback,
+                    item.type,
+                    "schema",
+                    input_type,
                 )
             )
             return None
         elif isinstance(input_type, Object | Interface):
             if input_type.id_ is None:
                 self.issues.append(
-
-
-
+                    ResourceMissingIdentifierError(
+                        input_type.type,
+                        input_type.display_name,
                     )
                 )
                 return Json()
@@ -304,11 +300,11 @@ class _DTDLConverter:
                 return ClassEntity.load(input_type.id_.as_class_id())
         else:
             self.issues.append(
-
-
-
-
-
+                PropertyTypeNotSupportedWarning(
+                    item.identifier_with_fallback,
+                    item.type,  # type: ignore[arg-type]
+                    "schema",
+                    input_type.type if input_type else "missing",
                 )
             )
             return None
cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py

@@ -6,15 +6,21 @@ from typing import Literal, overload

 from pydantic import ValidationError

-from cognite.neat.
-from cognite.neat.
+from cognite.neat.issues import IssueList, NeatIssue
+from cognite.neat.issues.warnings import (
+    FileItemNotSupportedWarning,
+    FileMissingRequiredFieldWarning,
+    FileReadWarning,
+    FileTypeUnexpectedWarning,
+    NeatValueWarning,
+)
+from cognite.neat.rules._shared import VerifiedRules
 from cognite.neat.rules.importers._base import BaseImporter, _handle_issues
 from cognite.neat.rules.importers._dtdl2rules.dtdl_converter import _DTDLConverter
 from cognite.neat.rules.importers._dtdl2rules.spec import DTDL_CLS_BY_TYPE_BY_SPEC, DTDLBase, Interface
-from cognite.neat.rules.issues import IssueList, ValidationIssue
 from cognite.neat.rules.models import InformationRules, RoleTypes, SchemaCompleteness, SheetList
 from cognite.neat.rules.models.information import InformationClass, InformationProperty
-from cognite.neat.utils.text import to_pascal
+from cognite.neat.utils.text import humanize_collection, to_pascal


 class DTDLImporter(BaseImporter):
@@ -36,7 +42,7 @@ class DTDLImporter(BaseImporter):
         self,
         items: Sequence[DTDLBase],
         title: str | None = None,
-        read_issues: list[
+        read_issues: list[NeatIssue] | None = None,
         schema: SchemaCompleteness = SchemaCompleteness.partial,
     ) -> None:
         self._items = items
@@ -45,11 +51,11 @@ class DTDLImporter(BaseImporter):
         self._schema_completeness = schema

     @classmethod
-    def _from_file_content(cls, file_content: str, filepath: Path) -> Iterable[DTDLBase |
+    def _from_file_content(cls, file_content: str, filepath: Path) -> Iterable[DTDLBase | NeatIssue]:
         raw = json.loads(file_content)
         if isinstance(raw, dict):
             if (context := raw.get("@context")) is None:
-                yield
+                yield FileMissingRequiredFieldWarning(filepath, "@context", "Missing '@context' key.")
                 return
             raw_list = [raw]
         elif isinstance(raw, list):
@@ -57,13 +63,11 @@ class DTDLImporter(BaseImporter):
                 (entry["@context"] for entry in raw if isinstance(entry, dict) and "@context" in entry), None
             )
             if context is None:
-                yield
+                yield FileMissingRequiredFieldWarning(filepath, "@context", "Missing '@context' key.")
                 return
             raw_list = raw
         else:
-            yield
-                filepath=filepath, reason="Content is not an object or array."
-            )
+            yield FileTypeUnexpectedWarning(filepath, frozenset(["dict", "list"]), "Content is not an object or array.")
             return

         if isinstance(context, list):
@@ -73,31 +77,35 @@ class DTDLImporter(BaseImporter):
         try:
             cls_by_type = DTDL_CLS_BY_TYPE_BY_SPEC[spec_version]
         except KeyError:
-            yield
+            yield NeatValueWarning(
+                f"Unsupported DTDL spec version: {spec_version} in {filepath}. "
+                f"Supported versions are {humanize_collection(DTDL_CLS_BY_TYPE_BY_SPEC.keys())}."
+                " The file will be skipped."
+            )
             return

         for item in raw_list:
             if not (type_ := item.get("@type")):
-                yield
+                yield FileMissingRequiredFieldWarning(filepath, "@type", "Missing '@type' key.")
                 continue
             cls_ = cls_by_type.get(type_)
             if cls_ is None:
-                yield
+                yield FileItemNotSupportedWarning(f"Unknown '@type' {type_}.", filepath=filepath)
                 continue
             try:
                 yield cls_.model_validate(item)
             except ValidationError as e:
-                yield
+                yield FileTypeUnexpectedWarning(filepath, frozenset([cls.__name__]), str(e))
             except Exception as e:
-                yield
+                yield FileReadWarning(filepath=filepath, reason=str(e))

     @classmethod
     def from_directory(cls, directory: Path) -> "DTDLImporter":
         items: list[DTDLBase] = []
-        issues: list[
+        issues: list[NeatIssue] = []
         for filepath in directory.glob("**/*.json"):
             for item in cls._from_file_content(filepath.read_text(), filepath):
-                if isinstance(item,
+                if isinstance(item, NeatIssue):
                     issues.append(item)
                 else:
                     items.append(item)
@@ -106,28 +114,28 @@ class DTDLImporter(BaseImporter):
     @classmethod
     def from_zip(cls, zip_file: Path) -> "DTDLImporter":
         items: list[DTDLBase] = []
-        issues: list[
+        issues: list[NeatIssue] = []
         with zipfile.ZipFile(zip_file) as z:
             for filepath in z.namelist():
                 if filepath.endswith(".json"):
                     for item in cls._from_file_content(z.read(filepath).decode(), Path(filepath)):
-                        if isinstance(item,
+                        if isinstance(item, NeatIssue):
                             issues.append(item)
                         else:
                             items.append(item)
         return cls(items, zip_file.stem, read_issues=issues)

     @overload
-    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) ->
+    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) -> VerifiedRules: ...

     @overload
     def to_rules(
         self, errors: Literal["continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[
+    ) -> tuple[VerifiedRules | None, IssueList]: ...

     def to_rules(
         self, errors: Literal["raise", "continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[
+    ) -> tuple[VerifiedRules | None, IssueList] | VerifiedRules:
         converter = _DTDLConverter(self._read_issues)

         converter.convert(self._items)
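
Based on the constructors and to_rules overloads above, a hypothetical usage sketch of the updated DTDL importer (the directory name is a placeholder, and importing DTDLImporter from cognite.neat.rules.importers is assumed from the package layout rather than confirmed by this diff):

    from pathlib import Path

    from cognite.neat.rules.importers import DTDLImporter  # assumed re-export; the class is defined in _dtdl2rules/dtdl_importer.py

    importer = DTDLImporter.from_directory(Path("dtdl_models"))  # placeholder directory of *.json DTDL files
    rules, issues = importer.to_rules(errors="continue")  # returns tuple[VerifiedRules | None, IssueList]
    if rules is None:
        for issue in issues:  # NeatIssue entries collected while reading and converting
            print(issue)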
cognite/neat/rules/importers/_dtdl2rules/spec.py

@@ -13,6 +13,7 @@ from abc import ABC
 from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias

 from pydantic import BaseModel, Field, field_validator, model_serializer, model_validator
+from pydantic.fields import FieldInfo

 from cognite.neat.rules.models.entities import ClassEntity

@@ -134,6 +135,10 @@ class DTDLBase(BaseModel, ABC):
     display_name: str | None = Field(None, alias="displayName")
     description: str | None = None

+    @property
+    def identifier_with_fallback(self) -> str:
+        return (self.id_.model_dump() if self.id_ else self.display_name) or "MISSING"
+

 PrimitiveSchema: TypeAlias = Literal[
     "boolean", "date", "dateTime", "double", "duration", "float", "integer", "long", "string", "time"
@@ -317,6 +322,8 @@ class Interface(DTDLBase):
         if not isinstance(value, list):
             return value
         context = info.data.get("@context", cls.default_context)
+        if isinstance(context, FieldInfo):
+            context = context.default
         spec_version = context.rsplit(";", maxsplit=1)[1]
         try:
             cls_by_type = DTDL_CLS_BY_TYPE_BY_SPEC[spec_version]
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2classes.py (new file)

@@ -0,0 +1,82 @@
+from typing import cast
+
+from rdflib import Graph
+
+from cognite.neat.rules.importers._rdf._shared import (
+    clean_up_classes,
+    make_classes_compliant,
+    parse_raw_classes_dataframe,
+)
+
+
+def parse_imf_to_classes(graph: Graph, language: str = "en") -> list[dict]:
+    """Parse IMF elements from RDF-graph and extract classes to pandas dataframe.
+
+    Args:
+        graph: Graph containing imf elements
+        language: Language to use for parsing, by default "en"
+
+    Returns:
+        Dataframe containing imf elements
+
+    !!! note "IMF Compliance"
+        The IMF elements are expressed in RDF, primarily using SHACL and OWL. To ensure
+        that the resulting classes are compliant with CDF, similar validation checks as
+        in the OWL ontology importer are applied.
+
+        For the IMF-types more of the compliance logic is placed directly in the SPARQL
+        query. Among these are the creation of class name not starting with a number,
+        and ensuring that all classes have a parent.
+
+        IMF-attributes are considered both classes and properties. This kind of punning
+        is necessary to capture additional information carried by attributes. They carry,
+        among other things, a set of relationsships to reference terms, units of measure,
+        and qualifiers that together make up the meaning of the attribute.
+    """
+
+    query = """
+    SELECT ?class ?name ?description ?parentClass ?reference ?match ?comment
+    WHERE {
+        #Finding IMF - elements
+        VALUES ?type { imf:BlockType imf:TerminalType imf:AttributeType }
+        ?imfClass a ?type .
+        OPTIONAL {?imfClass rdfs:subClassOf ?parent }.
+        OPTIONAL {?imfClass rdfs:label | skos:prefLabel ?name }.
+        OPTIONAL {?imfClass rdfs:comment | skos:description ?description} .
+
+        # Finding the last segment of the class IRI
+        BIND(STR(?imfClass) AS ?classString)
+        BIND(REPLACE(?classString, "^.*[/#]([^/#]*)$", "$1") AS ?classSegment)
+        BIND(IF(CONTAINS(?classString, "imf/"), CONCAT("IMF_", ?classSegment) , ?classSegment) AS ?class)
+
+        # Add imf:Attribute as parent class
+        BIND(IF(!bound(?parent) && ?type = imf:AttributeType, imf:Attribute, ?parent) AS ?parentClass)
+
+        # Rebind the IRI of the IMF-type to the ?reference variable to align with dataframe column headers
+        # This is solely for readability, the ?imfClass could have been returned directly instead of ?reference
+        BIND(?imfClass AS ?reference)
+
+        FILTER (!isBlank(?class))
+        FILTER (!bound(?parentClass) || !isBlank(?parentClass))
+        FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "en"))
+        FILTER (!bound(?description) || LANG(?description) = "" || LANGMATCHES(LANG(?description), "en"))
+    }
+    """
+
+    # create raw dataframe
+    raw_df = parse_raw_classes_dataframe(cast(list[tuple], list(graph.query(query.replace("en", language)))))
+    if raw_df.empty:
+        return []
+
+    # group values and clean up
+    processed_df = clean_up_classes(raw_df)
+
+    # make compliant
+    processed_df = make_classes_compliant(processed_df, importer="IMF")
+
+    # Make Parent Class list elements into string joined with comma
+    processed_df["Parent Class"] = processed_df["Parent Class"].apply(
+        lambda x: ", ".join(x) if isinstance(x, list) and x else None
+    )
+
+    return processed_df.dropna(axis=0, how="all").replace(float("nan"), None).to_dict(orient="records")
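
A hypothetical usage sketch of the new IMF class extractor (the Turtle file name is a placeholder; the parsed graph is expected to bind the imf:, rdfs:, and skos: prefixes that the SPARQL query above relies on):

    from rdflib import Graph

    from cognite.neat.rules.importers._rdf._imf2rules._imf2classes import parse_imf_to_classes

    graph = Graph()
    graph.parse("imf_types.ttl", format="turtle")  # placeholder file with imf:BlockType/TerminalType/AttributeType definitions
    class_rows = parse_imf_to_classes(graph, language="en")  # list of dicts, one row per extracted class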
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2metadata.py (new file)

@@ -0,0 +1,34 @@
+from rdflib import Namespace
+
+from cognite.neat.rules.importers._rdf._shared import make_metadata_compliant
+from cognite.neat.rules.models import RoleTypes, SchemaCompleteness
+
+
+def parse_imf_metadata() -> dict:
+    """Provide hardcoded IMF metadata to dict.
+
+    Returns:
+        Dictionary containing IMF metadata
+
+    !!! note "Compliant IMF metadata"
+        The current RDF provide IMF types as SHACL, but there are not any metadata describing
+        the actual content.
+
+    """
+
+    raw_metadata = {
+        "role": RoleTypes.information,
+        "schema": SchemaCompleteness.partial,
+        "prefix": "pca-imf",
+        "namespace": Namespace("http://posccaesar.org/imf/"),
+        "version": None,
+        "created": None,
+        "updated": None,
+        "title": "IMF - types",
+        "description": "IMF - types",
+        "creator": None,
+        "rights": None,
+        "license": None,
+    }
+
+    return make_metadata_compliant(raw_metadata)