cognite-neat 0.88.2__py3-none-any.whl → 0.89.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.

Note: this version of cognite-neat has been marked as a potentially problematic release.

Files changed (129)
  1. cognite/neat/_version.py +1 -1
  2. cognite/neat/constants.py +3 -0
  3. cognite/neat/graph/__init__.py +0 -3
  4. cognite/neat/graph/extractors/_mock_graph_generator.py +2 -1
  5. cognite/neat/graph/loaders/_base.py +3 -3
  6. cognite/neat/graph/loaders/_rdf2asset.py +24 -25
  7. cognite/neat/graph/loaders/_rdf2dms.py +20 -15
  8. cognite/neat/issues/__init__.py +1 -3
  9. cognite/neat/issues/_base.py +261 -71
  10. cognite/neat/issues/errors/__init__.py +73 -0
  11. cognite/neat/issues/errors/_external.py +67 -0
  12. cognite/neat/issues/errors/_general.py +35 -0
  13. cognite/neat/issues/errors/_properties.py +62 -0
  14. cognite/neat/issues/errors/_resources.py +111 -0
  15. cognite/neat/issues/errors/_workflow.py +36 -0
  16. cognite/neat/issues/formatters.py +1 -1
  17. cognite/neat/issues/warnings/__init__.py +66 -0
  18. cognite/neat/issues/warnings/_external.py +40 -0
  19. cognite/neat/issues/warnings/_general.py +29 -0
  20. cognite/neat/issues/warnings/_models.py +92 -0
  21. cognite/neat/issues/warnings/_properties.py +44 -0
  22. cognite/neat/issues/warnings/_resources.py +55 -0
  23. cognite/neat/issues/warnings/user_modeling.py +113 -0
  24. cognite/neat/rules/_shared.py +53 -2
  25. cognite/neat/rules/analysis/_base.py +1 -1
  26. cognite/neat/rules/exporters/_base.py +7 -18
  27. cognite/neat/rules/exporters/_rules2dms.py +17 -20
  28. cognite/neat/rules/exporters/_rules2excel.py +9 -16
  29. cognite/neat/rules/exporters/_rules2ontology.py +77 -64
  30. cognite/neat/rules/exporters/_rules2yaml.py +6 -9
  31. cognite/neat/rules/exporters/_validation.py +11 -96
  32. cognite/neat/rules/importers/_base.py +9 -58
  33. cognite/neat/rules/importers/_dms2rules.py +188 -135
  34. cognite/neat/rules/importers/_dtdl2rules/dtdl_converter.py +48 -35
  35. cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py +36 -45
  36. cognite/neat/rules/importers/_dtdl2rules/spec.py +7 -0
  37. cognite/neat/rules/importers/_rdf/_imf2rules/_imf2classes.py +8 -4
  38. cognite/neat/rules/importers/_rdf/_imf2rules/_imf2metadata.py +3 -3
  39. cognite/neat/rules/importers/_rdf/_imf2rules/_imf2properties.py +18 -11
  40. cognite/neat/rules/importers/_rdf/_imf2rules/_imf2rules.py +12 -19
  41. cognite/neat/rules/importers/_rdf/_inference2rules.py +14 -37
  42. cognite/neat/rules/importers/_rdf/_owl2rules/_owl2classes.py +1 -0
  43. cognite/neat/rules/importers/_rdf/_owl2rules/_owl2properties.py +1 -0
  44. cognite/neat/rules/importers/_rdf/_owl2rules/_owl2rules.py +9 -20
  45. cognite/neat/rules/importers/_rdf/_shared.py +4 -4
  46. cognite/neat/rules/importers/_spreadsheet2rules.py +46 -97
  47. cognite/neat/rules/importers/_yaml2rules.py +32 -58
  48. cognite/neat/rules/models/__init__.py +21 -5
  49. cognite/neat/rules/models/_base_input.py +162 -0
  50. cognite/neat/rules/models/{_base.py → _base_rules.py} +1 -12
  51. cognite/neat/rules/models/_rdfpath.py +4 -4
  52. cognite/neat/rules/models/{_types/_field.py → _types.py} +5 -10
  53. cognite/neat/rules/models/asset/__init__.py +5 -2
  54. cognite/neat/rules/models/asset/_rules.py +3 -23
  55. cognite/neat/rules/models/asset/_rules_input.py +40 -115
  56. cognite/neat/rules/models/asset/_validation.py +14 -10
  57. cognite/neat/rules/models/data_types.py +150 -44
  58. cognite/neat/rules/models/dms/__init__.py +19 -7
  59. cognite/neat/rules/models/dms/_exporter.py +102 -34
  60. cognite/neat/rules/models/dms/_rules.py +65 -162
  61. cognite/neat/rules/models/dms/_rules_input.py +186 -254
  62. cognite/neat/rules/models/dms/_schema.py +87 -78
  63. cognite/neat/rules/models/dms/_serializer.py +44 -3
  64. cognite/neat/rules/models/dms/_validation.py +106 -68
  65. cognite/neat/rules/models/domain.py +52 -1
  66. cognite/neat/rules/models/entities/__init__.py +63 -0
  67. cognite/neat/rules/models/entities/_constants.py +73 -0
  68. cognite/neat/rules/models/entities/_loaders.py +76 -0
  69. cognite/neat/rules/models/entities/_multi_value.py +67 -0
  70. cognite/neat/rules/models/{entities.py → entities/_single_value.py} +74 -232
  71. cognite/neat/rules/models/entities/_types.py +86 -0
  72. cognite/neat/rules/models/{wrapped_entities.py → entities/_wrapped.py} +1 -1
  73. cognite/neat/rules/models/information/__init__.py +10 -2
  74. cognite/neat/rules/models/information/_rules.py +10 -22
  75. cognite/neat/rules/models/information/_rules_input.py +57 -204
  76. cognite/neat/rules/models/information/_validation.py +48 -25
  77. cognite/neat/rules/transformers/__init__.py +21 -0
  78. cognite/neat/rules/transformers/_base.py +81 -0
  79. cognite/neat/rules/{models/information/_converter.py → transformers/_converters.py} +217 -21
  80. cognite/neat/rules/transformers/_map_onto.py +97 -0
  81. cognite/neat/rules/transformers/_pipelines.py +61 -0
  82. cognite/neat/rules/transformers/_verification.py +136 -0
  83. cognite/neat/{graph/stores → store}/_provenance.py +10 -1
  84. cognite/neat/utils/auxiliary.py +2 -35
  85. cognite/neat/utils/cdf/data_classes.py +20 -0
  86. cognite/neat/utils/regex_patterns.py +6 -0
  87. cognite/neat/utils/text.py +17 -0
  88. cognite/neat/workflows/base.py +4 -4
  89. cognite/neat/workflows/cdf_store.py +3 -3
  90. cognite/neat/workflows/steps/data_contracts.py +1 -1
  91. cognite/neat/workflows/steps/lib/current/graph_extractor.py +3 -3
  92. cognite/neat/workflows/steps/lib/current/graph_loader.py +2 -2
  93. cognite/neat/workflows/steps/lib/current/graph_store.py +1 -1
  94. cognite/neat/workflows/steps/lib/current/rules_exporter.py +116 -47
  95. cognite/neat/workflows/steps/lib/current/rules_importer.py +30 -28
  96. cognite/neat/workflows/steps/lib/current/rules_validator.py +5 -6
  97. cognite/neat/workflows/steps/lib/io/io_steps.py +5 -5
  98. cognite/neat/workflows/steps_registry.py +4 -5
  99. {cognite_neat-0.88.2.dist-info → cognite_neat-0.89.0.dist-info}/METADATA +1 -1
  100. {cognite_neat-0.88.2.dist-info → cognite_neat-0.89.0.dist-info}/RECORD +105 -106
  101. cognite/neat/exceptions.py +0 -145
  102. cognite/neat/graph/exceptions.py +0 -90
  103. cognite/neat/issues/errors/external.py +0 -21
  104. cognite/neat/issues/errors/properties.py +0 -75
  105. cognite/neat/issues/errors/resources.py +0 -123
  106. cognite/neat/issues/errors/schema.py +0 -0
  107. cognite/neat/issues/neat_warnings/__init__.py +0 -2
  108. cognite/neat/issues/neat_warnings/identifier.py +0 -27
  109. cognite/neat/issues/neat_warnings/models.py +0 -22
  110. cognite/neat/issues/neat_warnings/properties.py +0 -77
  111. cognite/neat/issues/neat_warnings/resources.py +0 -125
  112. cognite/neat/rules/issues/__init__.py +0 -22
  113. cognite/neat/rules/issues/base.py +0 -63
  114. cognite/neat/rules/issues/dms.py +0 -549
  115. cognite/neat/rules/issues/fileread.py +0 -197
  116. cognite/neat/rules/issues/ontology.py +0 -298
  117. cognite/neat/rules/issues/spreadsheet.py +0 -563
  118. cognite/neat/rules/issues/spreadsheet_file.py +0 -151
  119. cognite/neat/rules/issues/tables.py +0 -72
  120. cognite/neat/rules/models/_constants.py +0 -1
  121. cognite/neat/rules/models/_types/__init__.py +0 -19
  122. cognite/neat/rules/models/asset/_converter.py +0 -4
  123. cognite/neat/rules/models/dms/_converter.py +0 -145
  124. cognite/neat/workflows/_exceptions.py +0 -41
  125. /cognite/neat/{graph/stores → store}/__init__.py +0 -0
  126. /cognite/neat/{graph/stores → store}/_base.py +0 -0
  127. {cognite_neat-0.88.2.dist-info → cognite_neat-0.89.0.dist-info}/LICENSE +0 -0
  128. {cognite_neat-0.88.2.dist-info → cognite_neat-0.89.0.dist-info}/WHEEL +0 -0
  129. {cognite_neat-0.88.2.dist-info → cognite_neat-0.89.0.dist-info}/entry_points.txt +0 -0
cognite/neat/rules/importers/_dtdl2rules/dtdl_converter.py

@@ -1,11 +1,13 @@
 from collections import Counter
 from collections.abc import Callable, Sequence

-from cognite.neat.issues import IssueList, NeatIssue
-from cognite.neat.issues.errors.properties import PropertyTypeNotSupportedError
-from cognite.neat.issues.errors.resources import MissingIdentifierError, ResourceNotFoundError
-from cognite.neat.issues.neat_warnings.properties import PropertyTypeNotSupportedWarning
-from cognite.neat.issues.neat_warnings.resources import ResourceTypeNotSupportedWarning
+from cognite.neat.issues import IssueList
+from cognite.neat.issues.errors import (
+    PropertyTypeNotSupportedError,
+    ResourceMissingIdentifierError,
+    ResourceNotFoundError,
+)
+from cognite.neat.issues.warnings import PropertyTypeNotSupportedWarning, ResourceTypeNotSupportedWarning
 from cognite.neat.rules.importers._dtdl2rules.spec import (
     DTMI,
     Command,
@@ -25,17 +27,20 @@ from cognite.neat.rules.importers._dtdl2rules.spec import (
 )
 from cognite.neat.rules.models.data_types import _DATA_TYPE_BY_NAME, DataType, Json, String
 from cognite.neat.rules.models.entities import ClassEntity
-from cognite.neat.rules.models.information import InformationClass, InformationProperty
+from cognite.neat.rules.models.information import (
+    InformationInputClass,
+    InformationInputProperty,
+)


 class _DTDLConverter:
-    def __init__(self, issues: list[NeatIssue] | None = None) -> None:
+    def __init__(self, issues: IssueList | None = None) -> None:
         self.issues = IssueList(issues or [])
-        self.properties: list[InformationProperty] = []
-        self.classes: list[InformationClass] = []
+        self.properties: list[InformationInputProperty] = []
+        self.classes: list[InformationInputClass] = []
         self._item_by_id: dict[DTMI, DTDLBase] = {}

-        self._method_by_type: dict[type[DTDLBase], Callable[[DTDLBase, str | None], None]] = {
+        self._method_by_type = {
             Interface: self.convert_interface,  # type: ignore[dict-item]
             Property: self.convert_property,  # type: ignore[dict-item]
             PropertyV2: self.convert_property,  # type: ignore[dict-item]
@@ -51,7 +56,11 @@ class _DTDLConverter:
     def get_most_common_prefix(self) -> str:
         if not self.classes:
             raise ValueError("No classes found")
-        counted = Counter(cls_.class_.prefix for cls_ in self.classes if isinstance(cls_.class_.prefix, str))
+        counted = Counter(
+            class_.prefix
+            for class_ in (cls_.class_ for cls_ in self.classes)
+            if isinstance(class_, ClassEntity) and isinstance(class_.prefix, str)
+        )
         if not counted:
             raise ValueError("No prefixes found")
         return counted.most_common(1)[0][0]
@@ -73,19 +82,20 @@ class _DTDLConverter:
             self.convert_item(item)

     def convert_item(self, item: DTDLBase, parent: str | None = None) -> None:
-        convert_method = self._method_by_type.get(type(item))
+        # Bug in mypy https://github.com/python/mypy/issues/17478
+        convert_method: Callable[[DTDLBase, str | None], None] | None = self._method_by_type.get(type(item))  # type: ignore[assignment]
         if convert_method is not None:
             convert_method(item, parent)
         else:
             self.issues.append(
-                ResourceTypeNotSupportedWarning[str](
-                    item.id_.model_dump() if item.id_ else item.display_name or "missing",
+                ResourceTypeNotSupportedWarning(
+                    item.identifier_with_fallback,
                     item.type,
                 ),
             )

     def convert_interface(self, item: Interface, _: str | None) -> None:
-        class_ = InformationClass(
+        class_ = InformationInputClass(
             class_=item.id_.as_class_id(),
             name=item.display_name,
             description=item.description,
@@ -105,9 +115,9 @@ class _DTDLConverter:
                 )
             elif isinstance(sub_item_or_id, DTMI):
                 sub_item = self._item_by_id[sub_item_or_id]
-                self.convert_item(sub_item, class_.class_.versioned_id)
+                self.convert_item(sub_item, class_.class_str)
             else:
-                self.convert_item(sub_item_or_id, class_.class_.versioned_id)
+                self.convert_item(sub_item_or_id, class_.class_str)
         # interface.schema objects are handled in the convert method

     def convert_property(
@@ -120,7 +130,7 @@ class _DTDLConverter:
         if value_type is None:
             return None

-        prop = InformationProperty(
+        prop = InformationInputProperty(
             class_=ClassEntity.load(parent),
             property_=item.name,
             name=item.display_name,
@@ -134,8 +144,11 @@ class _DTDLConverter:

     def _missing_parent_warning(self, item: DTDLBaseWithName):
         self.issues.append(
-            ResourceNotFoundError[str](
-                (item.id_.model_dump() if item.id_ else item.display_name) or "missing", item.type, "parent missing"
+            ResourceNotFoundError(
+                "UNKNOWN",
+                "parent",
+                item.identifier_with_fallback,
+                item.type,
             )
         )

@@ -152,7 +165,7 @@ class _DTDLConverter:
         if item.request is None:
             self.issues.append(
                 ResourceTypeNotSupportedWarning[str](
-                    item.id_.model_dump() if item.id_ else item.display_name or "missing",
+                    item.identifier_with_fallback,
                     f"{item.type}.request",
                 ),
             )
@@ -163,7 +176,7 @@ class _DTDLConverter:
         value_type = self.schema_to_value_type(item.request.schema_, item)
         if value_type is None:
             return
-        prop = InformationProperty(
+        prop = InformationInputProperty(
             class_=ClassEntity.load(parent),
             property_=item.name,
             name=item.display_name,
@@ -183,7 +196,7 @@ class _DTDLConverter:
         value_type = self.schema_to_value_type(item.schema_, item)
         if value_type is None:
             return
-        prop = InformationProperty(
+        prop = InformationInputProperty(
             class_=ClassEntity.load(parent),
             property_=item.name,
             name=item.display_name,
@@ -206,14 +219,14 @@ class _DTDLConverter:
         else:
             # Falling back to json
             self.issues.append(
-                MissingIdentifierError(
-                    "Unknown",
+                ResourceMissingIdentifierError(
+                    "unknown",
                     item.target.model_dump(),
                 )
             )
             value_type = Json()

-        prop = InformationProperty(
+        prop = InformationInputProperty(
             class_=ClassEntity.load(parent),
             property_=item.name,
             name=item.display_name,
@@ -231,14 +244,14 @@ class _DTDLConverter:
     def convert_object(self, item: Object, _: str | None) -> None:
         if item.id_ is None:
             self.issues.append(
-                MissingIdentifierError(
+                ResourceMissingIdentifierError(
                     resource_type=item.type,
                     name=item.display_name,
                 )
             )
             return None

-        class_ = InformationClass(
+        class_ = InformationInputClass(
             class_=item.id_.as_class_id(),
             name=item.display_name,
             description=item.description,
@@ -250,7 +263,7 @@ class _DTDLConverter:
             value_type = self.schema_to_value_type(field_.schema_, item)
             if value_type is None:
                 continue
-            prop = InformationProperty(
+            prop = InformationInputProperty(
                 class_=class_.class_,
                 name=field_.name,
                 description=field_.description,
@@ -272,8 +285,8 @@ class _DTDLConverter:
             return _DATA_TYPE_BY_NAME[input_type.casefold()]()
         elif isinstance(input_type, str):
             self.issues.append(
-                PropertyTypeNotSupportedError[str](
-                    (item.id_.model_dump() if item.id_ else item.display_name) or "missing",
+                PropertyTypeNotSupportedError(
+                    item.identifier_with_fallback,
                     item.type,
                     "schema",
                     input_type,
@@ -283,7 +296,7 @@ class _DTDLConverter:
         elif isinstance(input_type, Object | Interface):
             if input_type.id_ is None:
                 self.issues.append(
-                    MissingIdentifierError(
+                    ResourceMissingIdentifierError(
                         input_type.type,
                         input_type.display_name,
                     )
@@ -292,12 +305,12 @@ class _DTDLConverter:
             else:
                 if isinstance(input_type, Object):
                     self.convert_object(input_type, None)
-                    return ClassEntity.load(input_type.id_.as_class_id())
+                    return input_type.id_.as_class_id()
                 else:
                     self.issues.append(
                         PropertyTypeNotSupportedWarning(
-                            item.id_.model_dump() if item.id_ else item.display_name or "missing",
-                            item.type,
+                            item.identifier_with_fallback,
+                            item.type,  # type: ignore[arg-type]
                             "schema",
                             input_type.type if input_type else "missing",
                         )
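
The rewritten get_most_common_prefix above is just a guarded frequency count over the class prefixes, now skipping entries whose class_ is not a ClassEntity. A minimal standalone sketch of the same counting idea (the sample prefixes below are illustrative, not taken from the package):

    from collections import Counter

    def most_common_prefix(prefixes: list[str]) -> str:
        # Count how often each prefix occurs and return the most frequent one,
        # mirroring Counter(...).most_common(1)[0][0] in the diff above.
        if not prefixes:
            raise ValueError("No prefixes found")
        return Counter(prefixes).most_common(1)[0][0]

    print(most_common_prefix(["neat", "neat", "other"]))  # -> "neat"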
cognite/neat/rules/importers/_dtdl2rules/dtdl_importer.py

@@ -2,23 +2,27 @@ import json
 import zipfile
 from collections.abc import Iterable, Sequence
 from pathlib import Path
-from typing import Literal, overload

 from pydantic import ValidationError

 from cognite.neat.issues import IssueList, NeatIssue
-from cognite.neat.rules import issues
-from cognite.neat.rules._shared import Rules
-from cognite.neat.rules.importers._base import BaseImporter, _handle_issues
+from cognite.neat.issues.warnings import (
+    FileItemNotSupportedWarning,
+    FileMissingRequiredFieldWarning,
+    FileReadWarning,
+    FileTypeUnexpectedWarning,
+    NeatValueWarning,
+)
+from cognite.neat.rules._shared import ReadRules
+from cognite.neat.rules.importers._base import BaseImporter
 from cognite.neat.rules.importers._dtdl2rules.dtdl_converter import _DTDLConverter
 from cognite.neat.rules.importers._dtdl2rules.spec import DTDL_CLS_BY_TYPE_BY_SPEC, DTDLBase, Interface
-from cognite.neat.rules.issues import ValidationIssue
-from cognite.neat.rules.models import InformationRules, RoleTypes, SchemaCompleteness, SheetList
-from cognite.neat.rules.models.information import InformationClass, InformationProperty
-from cognite.neat.utils.text import to_pascal
+from cognite.neat.rules.models import InformationInputRules, SchemaCompleteness
+from cognite.neat.rules.models.information import InformationInputMetadata
+from cognite.neat.utils.text import humanize_collection, to_pascal


-class DTDLImporter(BaseImporter):
+class DTDLImporter(BaseImporter[InformationInputRules]):
     """Importer from Azure Digital Twin - DTDL (Digital Twin Definition Language).

     This importer supports DTDL v2.0 and v3.0.
@@ -42,15 +46,15 @@ class DTDLImporter(BaseImporter):
     ) -> None:
         self._items = items
         self.title = title
-        self._read_issues = read_issues
+        self._read_issues = IssueList(read_issues)
         self._schema_completeness = schema

     @classmethod
-    def _from_file_content(cls, file_content: str, filepath: Path) -> Iterable[DTDLBase | ValidationIssue]:
+    def _from_file_content(cls, file_content: str, filepath: Path) -> Iterable[DTDLBase | NeatIssue]:
         raw = json.loads(file_content)
         if isinstance(raw, dict):
             if (context := raw.get("@context")) is None:
-                yield issues.fileread.InvalidFileFormatWarning(filepath=filepath, reason="Missing '@context' key.")
+                yield FileMissingRequiredFieldWarning(filepath, "@context", "Missing '@context' key.")
                 return
             raw_list = [raw]
         elif isinstance(raw, list):
@@ -58,13 +62,11 @@ class DTDLImporter(BaseImporter):
                 (entry["@context"] for entry in raw if isinstance(entry, dict) and "@context" in entry), None
             )
             if context is None:
-                yield issues.fileread.InvalidFileFormatWarning(filepath=filepath, reason="Missing '@context' key.")
+                yield FileMissingRequiredFieldWarning(filepath, "@context", "Missing '@context' key.")
                 return
             raw_list = raw
         else:
-            yield issues.fileread.InvalidFileFormatWarning(
-                filepath=filepath, reason="Content is not an object or array."
-            )
+            yield FileTypeUnexpectedWarning(filepath, frozenset(["dict", "list"]), "Content is not an object or array.")
             return

         if isinstance(context, list):
@@ -74,23 +76,27 @@ class DTDLImporter(BaseImporter):
         try:
             cls_by_type = DTDL_CLS_BY_TYPE_BY_SPEC[spec_version]
         except KeyError:
-            yield issues.fileread.UnsupportedSpecWarning(filepath=filepath, version=spec_version, spec_name="DTDL")
+            yield NeatValueWarning(
+                f"Unsupported DTDL spec version: {spec_version} in {filepath}. "
+                f"Supported versions are {humanize_collection(DTDL_CLS_BY_TYPE_BY_SPEC.keys())}."
+                " The file will be skipped."
+            )
             return

         for item in raw_list:
             if not (type_ := item.get("@type")):
-                yield issues.fileread.InvalidFileFormatWarning(filepath=filepath, reason="Missing '@type' key.")
+                yield FileMissingRequiredFieldWarning(filepath, "@type", "Missing '@type' key.")
                 continue
             cls_ = cls_by_type.get(type_)
             if cls_ is None:
-                yield issues.fileread.UnknownItemWarning(reason=f"Unknown '@type' {type_}.", filepath=filepath)
+                yield FileItemNotSupportedWarning(f"Unknown '@type' {type_}.", filepath=filepath)
                 continue
             try:
                 yield cls_.model_validate(item)
             except ValidationError as e:
-                yield issues.fileread.InvalidFileFormatWarning(filepath=filepath, reason=str(e))
+                yield FileTypeUnexpectedWarning(filepath, frozenset([cls.__name__]), str(e))
             except Exception as e:
-                yield issues.fileread.BugInImporterWarning(filepath=filepath, error=str(e), importer_name=cls.__name__)
+                yield FileReadWarning(filepath=filepath, reason=str(e))

     @classmethod
     def from_directory(cls, directory: Path) -> "DTDLImporter":
@@ -112,23 +118,13 @@ class DTDLImporter(BaseImporter):
             for filepath in z.namelist():
                 if filepath.endswith(".json"):
                     for item in cls._from_file_content(z.read(filepath).decode(), Path(filepath)):
-                        if isinstance(item, ValidationIssue):
+                        if isinstance(item, NeatIssue):
                             issues.append(item)
                         else:
                             items.append(item)
         return cls(items, zip_file.stem, read_issues=issues)

-    @overload
-    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) -> Rules: ...
-
-    @overload
-    def to_rules(
-        self, errors: Literal["continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[Rules | None, IssueList]: ...
-
-    def to_rules(
-        self, errors: Literal["raise", "continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[Rules | None, IssueList] | Rules:
+    def to_rules(self) -> ReadRules[InformationInputRules]:
         converter = _DTDLConverter(self._read_issues)

         converter.convert(self._items)
@@ -145,16 +141,11 @@ class DTDLImporter(BaseImporter):
             ...
         else:
             metadata["prefix"] = most_common_prefix
-        with _handle_issues(converter.issues) as future:
-            rules = InformationRules(
-                metadata=metadata,
-                properties=SheetList[InformationProperty](data=converter.properties),
-                classes=SheetList[InformationClass](data=converter.classes),
-            )
-        if future.result == "failure":
-            if errors == "continue":
-                return None, converter.issues
-            else:
-                raise converter.issues.as_errors()

-        return self._to_output(rules, converter.issues, errors, role)
+        rules = InformationInputRules(
+            metadata=InformationInputMetadata.load(metadata),
+            properties=converter.properties,
+            classes=converter.classes,
+        )
+
+        return ReadRules(rules, converter.issues, {})
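
The to_rules change above removes the errors/role overloads: instead of returning either a Rules object or a (rules, issues) tuple, the importer now always returns a ReadRules container holding the unverified InformationInputRules together with the collected issues. A hedged sketch of how calling code might adapt; the rules and issues attribute names on ReadRules are assumptions inferred from the positional construction ReadRules(rules, converter.issues, {}) in the diff, and the re-exported import path is likewise an assumption:

    from pathlib import Path

    from cognite.neat.rules.importers import DTDLImporter  # assumed re-export location

    importer = DTDLImporter.from_directory(Path("dtdl_models"))  # placeholder directory

    # 0.88.2: rules, issues = importer.to_rules(errors="continue")
    # 0.89.0: a single ReadRules result, no errors/role arguments.
    read = importer.to_rules()
    if read.rules is None:         # assumption: ReadRules exposes .rules
        for issue in read.issues:  # assumption: ReadRules exposes .issues
            print(issue)
    else:
        input_rules = read.rules   # InformationInputRules, not yet verified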
cognite/neat/rules/importers/_dtdl2rules/spec.py

@@ -13,6 +13,7 @@ from abc import ABC
 from typing import TYPE_CHECKING, Any, ClassVar, Literal, TypeAlias

 from pydantic import BaseModel, Field, field_validator, model_serializer, model_validator
+from pydantic.fields import FieldInfo

 from cognite.neat.rules.models.entities import ClassEntity

@@ -134,6 +135,10 @@ class DTDLBase(BaseModel, ABC):
     display_name: str | None = Field(None, alias="displayName")
     description: str | None = None

+    @property
+    def identifier_with_fallback(self) -> str:
+        return (self.id_.model_dump() if self.id_ else self.display_name) or "MISSING"
+


 PrimitiveSchema: TypeAlias = Literal[
     "boolean", "date", "dateTime", "double", "duration", "float", "integer", "long", "string", "time"
@@ -317,6 +322,8 @@ class Interface(DTDLBase):
         if not isinstance(value, list):
             return value
         context = info.data.get("@context", cls.default_context)
+        if isinstance(context, FieldInfo):
+            context = context.default
         spec_version = context.rsplit(";", maxsplit=1)[1]
         try:
             cls_by_type = DTDL_CLS_BY_TYPE_BY_SPEC[spec_version]
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2classes.py

@@ -37,23 +37,27 @@ def parse_imf_to_classes(graph: Graph, language: str = "en") -> list[dict]:
     query = """
     SELECT ?class ?name ?description ?parentClass ?reference ?match ?comment
     WHERE {
-        #Finding IMF - elements
+        # Finding IMF - elements
         VALUES ?type { imf:BlockType imf:TerminalType imf:AttributeType }
         ?imfClass a ?type .
         OPTIONAL {?imfClass rdfs:subClassOf ?parent }.
         OPTIONAL {?imfClass rdfs:label | skos:prefLabel ?name }.
-        OPTIONAL {?imfClass rdfs:comment | skos:description ?description} .
+
+        # Note: Bug in PCA has lead to the use non-existing term skos:description. This will be replaced
+        # with the correct skos:definition in the near future, so both terms are included here.
+        OPTIONAL {?imfClass rdfs:comment | skos:definition | skos:description ?description} .

         # Finding the last segment of the class IRI
         BIND(STR(?imfClass) AS ?classString)
-        BIND(REPLACE(?classString, "^.*[/#]([^/#]*)$", "$1") AS ?classSegment)
+        BIND(REPLACE(?classString, "^.*[/#]([^/#]*)$", "$1") AS ?tempSegment)
+        BIND(REPLACE(?tempSegment, "-", "_") AS ?classSegment)
         BIND(IF(CONTAINS(?classString, "imf/"), CONCAT("IMF_", ?classSegment) , ?classSegment) AS ?class)

         # Add imf:Attribute as parent class
         BIND(IF(!bound(?parent) && ?type = imf:AttributeType, imf:Attribute, ?parent) AS ?parentClass)

         # Rebind the IRI of the IMF-type to the ?reference variable to align with dataframe column headers
-        # This is solely for readability, the ?imfClass could have been returnered directly instead of ?reference
+        # This is solely for readability, the ?imfClass could have been returned directly instead of ?reference
         BIND(?imfClass AS ?reference)

         FILTER (!isBlank(?class))
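
The extra REPLACE added to the query above normalizes the last IRI segment by swapping "-" for "_" before the optional "IMF_" prefix is applied. The same transformation expressed in plain Python, purely as an illustration of what the SPARQL does:

    import re

    def to_class_segment(iri: str) -> str:
        # Last segment after the final '/' or '#', mirroring the SPARQL REPLACE pattern.
        segment = re.sub(r"^.*[/#]([^/#]*)$", r"\1", iri)
        # New in 0.89.0: hyphens are replaced with underscores.
        segment = segment.replace("-", "_")
        # Terms from the imf/ namespace get an "IMF_" prefix.
        return f"IMF_{segment}" if "imf/" in iri else segment

    print(to_class_segment("https://posccaesar.org/imf/some-block-type"))  # IMF_some_block_type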
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2metadata.py

@@ -19,12 +19,12 @@ def parse_imf_metadata() -> dict:
     raw_metadata = {
         "role": RoleTypes.information,
         "schema": SchemaCompleteness.partial,
-        "prefix": "pca-imf",
-        "namespace": Namespace("http://posccaesar.org/imf/"),
+        "prefix": "pcaimf",
+        "namespace": Namespace("https://posccaesar.org/imf/"),
         "version": None,
         "created": None,
         "updated": None,
-        "title": "IMF - types",
+        "title": "IMF_types",
         "description": "IMF - types",
         "creator": None,
         "rights": None,
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2properties.py

@@ -36,7 +36,7 @@ def parse_imf_to_properties(graph: Graph, language: str = "en") -> list[dict]:
     """

     query = """
-    SELECT ?class ?property ?name ?description ?valueType ?minCount ?maxCount ?default ?reference
+    SELECT DISTINCT ?class ?property ?name ?description ?valueType ?minCount ?maxCount ?default ?reference
     ?match ?comment ?propertyType
     WHERE
     {
@@ -48,7 +48,7 @@ def parse_imf_to_properties(graph: Graph, language: str = "en") -> list[dict]:
         ?propertyShape sh:path ?imfProperty .

         OPTIONAL { ?imfProperty skos:prefLabel ?name . }
-        OPTIONAL { ?imfProperty skos:description ?description . }
+        OPTIONAL { ?imfProperty skos:definition | skos:description ?description . }
         OPTIONAL { ?imfProperty rdfs:range ?range . }
         OPTIONAL { ?imfProperty a ?type . }
         OPTIONAL { ?propertyShape sh:minCount ?minCardinality} .
@@ -62,22 +62,24 @@ def parse_imf_to_properties(graph: Graph, language: str = "en") -> list[dict]:
         ?imfClass a imf:AttributeType ;
             ?imfProperty ?default .

-        # imf:predicate is required
-        BIND(IF(?imfProperty = <http://ns.imfid.org/imf#predicate>, 1, 0) AS ?minCardinality)
-
         # The following information is used to describe the attribute when it is connected to a block or a terminal
         # and not duplicated here.
-        FILTER(?imfProperty != rdf:type && ?imfProperty != skos:prefLabel && ?imfProperty != skos:description)
+        # Note: Bug in PCA has lead to the use non-existing term skos:description. This will be replaced
+        # with the correct skos:definition in the near future, so both terms are included here.
+        FILTER(?imfProperty != rdf:type && ?imfProperty != skos:prefLabel &&
+               ?imfProperty != skos:defintion && ?imfProperty != skos:description)
         }

         # Finding the last segment of the class IRI
         BIND(STR(?imfClass) AS ?classString)
-        BIND(REPLACE(?classString, "^.*[/#]([^/#]*)$", "$1") AS ?classSegment)
+        BIND(REPLACE(?classString, "^.*[/#]([^/#]*)$", "$1") AS ?tempClassSegment)
+        BIND(REPLACE(?tempClassSegment, "-", "_") AS ?classSegment)
         BIND(IF(CONTAINS(?classString, "imf/"), CONCAT("IMF_", ?classSegment) , ?classSegment) AS ?class)

         # Finding the last segment of the property IRI
         BIND(STR(?imfProperty) AS ?propertyString)
-        BIND(REPLACE(?propertyString, "^.*[/#]([^/#]*)$", "$1") AS ?propertySegment)
+        BIND(REPLACE(?propertyString, "^.*[/#]([^/#]*)$", "$1") AS ?tempPropertySegment)
+        BIND(REPLACE(?tempPropertySegment, "-", "_") AS ?propertySegment)
         BIND(IF(CONTAINS(?propertyString, "imf/"), CONCAT("IMF_", ?propertySegment) , ?propertySegment) AS ?property)

         # Set the value type for the property based on sh:class, sh:qualifiedValueType or rdfs:range
@@ -85,12 +87,17 @@ def parse_imf_to_properties(graph: Graph, language: str = "en") -> list[dict]:

         # Finding the last segment of value types
         BIND(STR(?valueIriType) AS ?valueTypeString)
-        BIND(REPLACE(?valueTypeString, "^.*[/#]([^/#]*)$", "$1") AS ?valueTypeSegment)
+        BIND(REPLACE(?valueTypeString, "^.*[/#]([^/#]*)$", "$1") AS ?tempValueTypeSegment)
+        BIND(REPLACE(?tempValueTypeSegment, "-", "_") AS ?valueTypeSegment)
         BIND(IF(CONTAINS(?valueTypeString, "imf/"), CONCAT("IMF_", ?valueTypeSegment) , ?valueTypeSegment)
             AS ?valueType)

-        # Helper variable to set property type if value type is not already set.
-        BIND(IF(BOUND(?type) && ?type = owl:DatatypeProperty, ?type , owl:ObjectProperty) AS ?propertyType)
+        # Helper variable to set owl datatype- or object-property if this is not already set.
+        BIND(IF( EXISTS {?imfProperty a ?tempPropertyType . FILTER(?tempPropertyType = owl:DatatypeProperty) },
+                 owl:DatatypeProperty,
+                 owl:ObjectProperty
+             )
+             AS ?propertyType)

         # Assert cardinality values if they do not exist
         BIND(IF(BOUND(?minCardinality), ?minCardinality, 0) AS ?minCount)
cognite/neat/rules/importers/_rdf/_imf2rules/_imf2rules.py

@@ -2,13 +2,14 @@
 there are loaders to TransformationRules pydantic class."""

 from pathlib import Path
-from typing import Literal, overload

 from rdflib import DC, DCTERMS, OWL, RDF, RDFS, SH, SKOS, Graph

 from cognite.neat.issues import IssueList
-from cognite.neat.rules.importers._base import BaseImporter, Rules
-from cognite.neat.rules.models import InformationRules, RoleTypes
+from cognite.neat.issues.errors import FileReadError
+from cognite.neat.rules._shared import ReadRules
+from cognite.neat.rules.importers._base import BaseImporter
+from cognite.neat.rules.models import InformationInputRules
 from cognite.neat.rules.models.data_types import _XSD_TYPES

 from ._imf2classes import parse_imf_to_classes
@@ -16,7 +17,7 @@ from ._imf2metadata import parse_imf_metadata
 from ._imf2properties import parse_imf_to_properties


-class IMFImporter(BaseImporter):
+class IMFImporter(BaseImporter[InformationInputRules]):
     """Convert SHACL shapes to tables/ transformation rules / Excel file.

     Args:
@@ -36,24 +37,16 @@ class IMFImporter(BaseImporter):
     """

     def __init__(self, filepath: Path):
-        self.owl_filepath = filepath
-
-    @overload
-    def to_rules(self, errors: Literal["raise"], role: RoleTypes | None = None) -> Rules: ...
-
-    @overload
-    def to_rules(
-        self, errors: Literal["continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[Rules | None, IssueList]: ...
+        self.filepath = filepath

     def to_rules(
-        self, errors: Literal["raise", "continue"] = "continue", role: RoleTypes | None = None
-    ) -> tuple[Rules | None, IssueList] | Rules:
+        self,
+    ) -> ReadRules[InformationInputRules]:
         graph = Graph()
         try:
-            graph.parse(self.owl_filepath)
+            graph.parse(self.filepath)
         except Exception as e:
-            raise Exception(f"Could not parse owl file: {e}") from e
+            return ReadRules(None, IssueList([FileReadError(self.filepath, f"Could not parse owl file: {e}")]), {})

         # bind key namespaces
         graph.bind("owl", OWL)
@@ -74,8 +67,8 @@ class IMFImporter(BaseImporter):

         components = make_components_compliant(components)

-        rules = InformationRules.model_validate(components)
-        return self._to_output(rules, IssueList(), errors, role)
+        rules = InformationInputRules.load(components)
+        return ReadRules(rules, IssueList(), {})


 def make_components_compliant(components: dict) -> dict:
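
With the change above, a parse failure in IMFImporter.to_rules no longer raises a bare Exception; it is reported as a FileReadError inside the returned ReadRules, and the rules slot is left as None. A sketch of how a caller might handle this; as in the DTDL example earlier, the rules/issues attribute names on ReadRules are assumptions, the module path is inferred from the file listing, and "shapes.ttl" is a placeholder file name:

    from pathlib import Path

    from cognite.neat.rules.importers._rdf._imf2rules._imf2rules import IMFImporter  # path inferred from this diff

    read = IMFImporter(Path("shapes.ttl")).to_rules()
    if read.rules is None:
        # graph.parse() failed; the FileReadError created in to_rules() ends up here.
        for issue in read.issues:
            print(issue)
    else:
        input_rules = read.rules  # InformationInputRules loaded from the SHACL shapes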