cognite-neat 0.98.0__py3-none-any.whl → 0.99.1__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.


This version of cognite-neat might be problematic.

Files changed (103)
  1. cognite/neat/_client/__init__.py +4 -0
  2. cognite/neat/_client/_api/data_modeling_loaders.py +585 -0
  3. cognite/neat/_client/_api/schema.py +111 -0
  4. cognite/neat/_client/_api_client.py +17 -0
  5. cognite/neat/_client/data_classes/__init__.py +0 -0
  6. cognite/neat/{_utils/cdf/data_classes.py → _client/data_classes/data_modeling.py} +8 -135
  7. cognite/neat/_client/data_classes/schema.py +495 -0
  8. cognite/neat/_constants.py +27 -4
  9. cognite/neat/_graph/_shared.py +14 -15
  10. cognite/neat/_graph/extractors/_classic_cdf/_assets.py +14 -154
  11. cognite/neat/_graph/extractors/_classic_cdf/_base.py +154 -7
  12. cognite/neat/_graph/extractors/_classic_cdf/_classic.py +25 -14
  13. cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +17 -92
  14. cognite/neat/_graph/extractors/_classic_cdf/_events.py +13 -162
  15. cognite/neat/_graph/extractors/_classic_cdf/_files.py +15 -179
  16. cognite/neat/_graph/extractors/_classic_cdf/_labels.py +32 -100
  17. cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +27 -178
  18. cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +14 -139
  19. cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +15 -173
  20. cognite/neat/_graph/extractors/_rdf_file.py +6 -7
  21. cognite/neat/_graph/loaders/_rdf2dms.py +2 -2
  22. cognite/neat/_graph/queries/_base.py +17 -1
  23. cognite/neat/_graph/transformers/_classic_cdf.py +74 -147
  24. cognite/neat/_graph/transformers/_prune_graph.py +1 -1
  25. cognite/neat/_graph/transformers/_rdfpath.py +1 -1
  26. cognite/neat/_issues/_base.py +26 -17
  27. cognite/neat/_issues/errors/__init__.py +4 -2
  28. cognite/neat/_issues/errors/_external.py +7 -0
  29. cognite/neat/_issues/errors/_properties.py +2 -7
  30. cognite/neat/_issues/errors/_resources.py +1 -1
  31. cognite/neat/_issues/warnings/__init__.py +8 -0
  32. cognite/neat/_issues/warnings/_external.py +16 -0
  33. cognite/neat/_issues/warnings/_properties.py +16 -0
  34. cognite/neat/_issues/warnings/_resources.py +26 -2
  35. cognite/neat/_issues/warnings/user_modeling.py +4 -4
  36. cognite/neat/_rules/_constants.py +8 -11
  37. cognite/neat/_rules/analysis/_base.py +8 -4
  38. cognite/neat/_rules/exporters/_base.py +3 -4
  39. cognite/neat/_rules/exporters/_rules2dms.py +33 -46
  40. cognite/neat/_rules/importers/__init__.py +1 -3
  41. cognite/neat/_rules/importers/_base.py +1 -1
  42. cognite/neat/_rules/importers/_dms2rules.py +6 -29
  43. cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
  44. cognite/neat/_rules/importers/_rdf/_base.py +34 -11
  45. cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
  46. cognite/neat/_rules/importers/_rdf/_inference2rules.py +43 -35
  47. cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
  48. cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
  49. cognite/neat/_rules/models/__init__.py +1 -1
  50. cognite/neat/_rules/models/_base_rules.py +22 -12
  51. cognite/neat/_rules/models/dms/__init__.py +4 -2
  52. cognite/neat/_rules/models/dms/_exporter.py +45 -48
  53. cognite/neat/_rules/models/dms/_rules.py +20 -17
  54. cognite/neat/_rules/models/dms/_rules_input.py +52 -8
  55. cognite/neat/_rules/models/dms/_validation.py +391 -119
  56. cognite/neat/_rules/models/entities/_single_value.py +32 -4
  57. cognite/neat/_rules/models/information/__init__.py +2 -0
  58. cognite/neat/_rules/models/information/_rules.py +0 -67
  59. cognite/neat/_rules/models/information/_validation.py +9 -9
  60. cognite/neat/_rules/models/mapping/__init__.py +2 -3
  61. cognite/neat/_rules/models/mapping/_classic2core.py +36 -146
  62. cognite/neat/_rules/models/mapping/_classic2core.yaml +343 -0
  63. cognite/neat/_rules/transformers/__init__.py +2 -2
  64. cognite/neat/_rules/transformers/_converters.py +110 -11
  65. cognite/neat/_rules/transformers/_mapping.py +105 -30
  66. cognite/neat/_rules/transformers/_pipelines.py +1 -1
  67. cognite/neat/_rules/transformers/_verification.py +31 -3
  68. cognite/neat/_session/_base.py +24 -8
  69. cognite/neat/_session/_drop.py +35 -0
  70. cognite/neat/_session/_inspect.py +17 -5
  71. cognite/neat/_session/_mapping.py +39 -0
  72. cognite/neat/_session/_prepare.py +219 -23
  73. cognite/neat/_session/_read.py +49 -12
  74. cognite/neat/_session/_to.py +8 -5
  75. cognite/neat/_session/exceptions.py +4 -0
  76. cognite/neat/_store/_base.py +27 -24
  77. cognite/neat/_utils/rdf_.py +34 -5
  78. cognite/neat/_version.py +1 -1
  79. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +5 -88
  80. cognite/neat/_workflows/steps/lib/current/rules_importer.py +3 -14
  81. cognite/neat/_workflows/steps/lib/current/rules_validator.py +6 -7
  82. {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/METADATA +3 -3
  83. {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/RECORD +87 -92
  84. cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
  85. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
  86. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
  87. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
  88. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
  89. cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
  90. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
  91. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
  92. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
  93. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
  94. cognite/neat/_rules/models/dms/_schema.py +0 -1101
  95. cognite/neat/_rules/models/mapping/_base.py +0 -131
  96. cognite/neat/_utils/cdf/loaders/__init__.py +0 -25
  97. cognite/neat/_utils/cdf/loaders/_base.py +0 -54
  98. cognite/neat/_utils/cdf/loaders/_data_modeling.py +0 -339
  99. cognite/neat/_utils/cdf/loaders/_ingestion.py +0 -167
  100. /cognite/neat/{_utils/cdf → _client/_api}/__init__.py +0 -0
  101. {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/LICENSE +0 -0
  102. {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/WHEEL +0 -0
  103. {cognite_neat-0.98.0.dist-info → cognite_neat-0.99.1.dist-info}/entry_points.txt +0 -0
@@ -19,7 +19,7 @@ from rdflib import RDF
 from cognite.neat._graph._tracking import LogTracker, Tracker
 from cognite.neat._issues import IssueList, NeatIssue, NeatIssueList
 from cognite.neat._issues.errors import (
-    ResourceConvertionError,
+    ResourceConversionError,
     ResourceCreationError,
     ResourceDuplicatedError,
     ResourceRetrievalError,
@@ -88,7 +88,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
             data_model = rules.as_schema().as_read_model()
         except Exception as e:
             issues.append(
-                ResourceConvertionError(
+                ResourceConversionError(
                     identifier=rules.metadata.as_identifier(),
                     resource_type="DMS Rules",
                     target_format="read DMS model",
@@ -12,7 +12,7 @@ from cognite.neat._rules._constants import EntityTypes
 from cognite.neat._rules.models.entities import ClassEntity
 from cognite.neat._rules.models.information import InformationRules
 from cognite.neat._shared import InstanceType
-from cognite.neat._utils.rdf_ import remove_namespace_from_uri
+from cognite.neat._utils.rdf_ import remove_instance_ids_in_batch, remove_namespace_from_uri

 from ._construct import build_construct_query

@@ -342,3 +342,19 @@ class Queries:
             self.graph.query(query.format(unknownType=str(UNKNOWN_TYPE))),
         ):
             yield cast(URIRef, source_type), cast(URIRef, property_), [URIRef(uri) for uri in value_types.split(",")]
+
+    def drop_types(self, type_: list[URIRef]) -> dict[URIRef, int]:
+        """Drop types from the graph store
+
+        Args:
+            type_: List of types to drop
+
+        Returns:
+            Dictionary of dropped types
+        """
+        dropped_types: dict[URIRef, int] = {}
+        for t in type_:
+            instance_ids = self.list_instances_ids_of_class(t)
+            dropped_types[t] = len(instance_ids)
+            remove_instance_ids_in_batch(self.graph, instance_ids)
+        return dropped_types
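
For orientation, the new Queries.drop_types method could be exercised roughly as follows. This is an illustrative sketch, not code from the package: "queries" stands for an already-initialised Queries instance backed by a populated rdflib Graph, and the classic namespace URI is a placeholder.

from rdflib import Namespace

CLASSIC = Namespace("http://example.org/classic/")  # placeholder namespace for the sketch

# queries: an initialised Queries instance (e.g. from a populated graph store)
dropped = queries.drop_types([CLASSIC.Labels, CLASSIC.Relationship])
for type_, count in dropped.items():
    print(f"Dropped {count} instances of {type_}")
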
@@ -1,4 +1,6 @@
+import textwrap
 import warnings
+from abc import ABC
 from typing import cast

 from rdflib import RDF, Graph, Literal, Namespace, URIRef
@@ -7,7 +9,7 @@ from rdflib.query import ResultRow
 from cognite.neat._constants import CLASSIC_CDF_NAMESPACE, DEFAULT_NAMESPACE
 from cognite.neat._graph import extractors
 from cognite.neat._issues.warnings import ResourceNotFoundWarning
-from cognite.neat._utils.rdf_ import remove_namespace_from_uri
+from cognite.neat._utils.rdf_ import Triple, add_triples_in_batch, remove_namespace_from_uri

 from ._base import BaseTransformer

@@ -32,8 +34,8 @@ class AddAssetDepth(BaseTransformer):
         depth_typing: dict[int, str] | None = None,
     ):
         self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
-        self.root_prop = root_prop or DEFAULT_NAMESPACE.root
-        self.parent_prop = parent_prop or DEFAULT_NAMESPACE.parent
+        self.root_prop = root_prop or DEFAULT_NAMESPACE.rootId
+        self.parent_prop = parent_prop or DEFAULT_NAMESPACE.parentId
         self.depth_typing = depth_typing

     def transform(self, graph: Graph) -> None:
@@ -75,7 +77,33 @@ class AddAssetDepth(BaseTransformer):
         return None


-class AssetTimeSeriesConnector(BaseTransformer):
+class BaseAssetConnector(BaseTransformer, ABC):
+    _asset_type: URIRef = DEFAULT_NAMESPACE.Asset
+    _item_type: URIRef
+    _default_attribute: URIRef
+    _connection_type: URIRef
+
+    _select_item_ids = "SELECT DISTINCT ?item_id WHERE {{?item_id a <{item_type}>}}"
+    _select_connected_assets: str = textwrap.dedent("""SELECT ?asset_id WHERE {{
+        <{item_id}> <{attribute}> ?asset_id .
+        ?asset_id a <{asset_type}>}}""")
+
+    def __init__(self, attribute: URIRef | None = None) -> None:
+        self._attribute = attribute or self._default_attribute
+
+    def transform(self, graph: Graph) -> None:
+        for item_id, *_ in graph.query(self._select_item_ids.format(item_type=self._item_type)):  # type: ignore[misc]
+            triples: list[Triple] = []
+            for asset_id, *_ in graph.query(  # type: ignore[misc]
+                self._select_connected_assets.format(
+                    item_id=item_id, attribute=self._attribute, asset_type=self._asset_type
+                )
+            ):
+                triples.append((asset_id, self._connection_type, item_id))  # type: ignore[arg-type]
+            add_triples_in_batch(graph, triples)
+
+
+class AssetTimeSeriesConnector(BaseAssetConnector):
     description: str = "Connects assets to timeseries, thus forming bi-directional connection"
     _use_only_once: bool = True
     _need_changes = frozenset(
@@ -84,41 +112,12 @@ class AssetTimeSeriesConnector(BaseTransformer):
             str(extractors.TimeSeriesExtractor.__name__),
         }
     )
-    _asset_template: str = """SELECT ?asset_id WHERE {{
-        <{timeseries_id}> <{asset_prop}> ?asset_id .
-        ?asset_id a <{asset_type}>}}"""
-
-    def __init__(
-        self,
-        asset_type: URIRef | None = None,
-        timeseries_type: URIRef | None = None,
-        asset_prop: URIRef | None = None,
-    ):
-        self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
-        self.timeseries_type = timeseries_type or DEFAULT_NAMESPACE.TimeSeries
-        self.asset_prop = asset_prop or DEFAULT_NAMESPACE.asset
-
-    def transform(self, graph: Graph) -> None:
-        for ts_id_result in graph.query(
-            f"SELECT DISTINCT ?timeseries_id WHERE {{?timeseries_id a <{self.timeseries_type}>}}"
-        ):
-            timeseries_id: URIRef = cast(tuple, ts_id_result)[0]
-
-            if asset_id_res := list(
-                graph.query(
-                    self._asset_template.format(
-                        timeseries_id=timeseries_id,
-                        asset_prop=self.asset_prop,
-                        asset_type=self.asset_type,
-                    )
-                )
-            ):
-                # timeseries can be connected to only one asset in the graph
-                asset_id = cast(list[tuple], asset_id_res)[0][0]
-                graph.add((asset_id, DEFAULT_NAMESPACE.timeSeries, timeseries_id))
+    _item_type = DEFAULT_NAMESPACE.TimeSeries
+    _default_attribute = DEFAULT_NAMESPACE.assetId
+    _connection_type = DEFAULT_NAMESPACE.timeSeries


-class AssetSequenceConnector(BaseTransformer):
+class AssetSequenceConnector(BaseAssetConnector):
     description: str = "Connects assets to sequences, thus forming bi-directional connection"
     _use_only_once: bool = True
     _need_changes = frozenset(
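
As a rough illustration of the new pattern (a sketch, not code from the package): a connector is now defined by three class attributes and applied by calling transform on a populated graph. The ExampleWorkOrderConnector class, the WorkOrder type, and the workOrder predicate below are invented for illustration; the import paths are assumed from the file list of this diff.

from rdflib import Graph

from cognite.neat._constants import DEFAULT_NAMESPACE
from cognite.neat._graph.transformers._classic_cdf import BaseAssetConnector


class ExampleWorkOrderConnector(BaseAssetConnector):
    description: str = "Connects assets to work orders"
    _item_type = DEFAULT_NAMESPACE.WorkOrder        # hypothetical item type
    _default_attribute = DEFAULT_NAMESPACE.assetIds  # attribute on the item pointing at assets
    _connection_type = DEFAULT_NAMESPACE.workOrder   # predicate added from asset to item

graph = Graph()  # in practice, a graph populated by the classic CDF extractors
ExampleWorkOrderConnector().transform(graph)  # adds (asset, workOrder, item) triples in batches
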
@@ -127,41 +126,12 @@ class AssetSequenceConnector(BaseTransformer):
             str(extractors.SequencesExtractor.__name__),
         }
     )
-    _asset_template: str = """SELECT ?asset_id WHERE {{
-        <{sequence_id}> <{asset_prop}> ?asset_id .
-        ?asset_id a <{asset_type}>}}"""
-
-    def __init__(
-        self,
-        asset_type: URIRef | None = None,
-        sequence_type: URIRef | None = None,
-        asset_prop: URIRef | None = None,
-    ):
-        self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
-        self.sequence_type = sequence_type or DEFAULT_NAMESPACE.Sequence
-        self.asset_prop = asset_prop or DEFAULT_NAMESPACE.asset
-
-    def transform(self, graph: Graph) -> None:
-        for sequency_id_result in graph.query(
-            f"SELECT DISTINCT ?sequence_id WHERE {{?sequence_id a <{self.sequence_type}>}}"
-        ):
-            sequence_id: URIRef = cast(tuple, sequency_id_result)[0]
-
-            if asset_id_res := list(
-                graph.query(
-                    self._asset_template.format(
-                        sequence_id=sequence_id,
-                        asset_prop=self.asset_prop,
-                        asset_type=self.asset_type,
-                    )
-                )
-            ):
-                # sequence can be connected to only one asset in the graph
-                asset_id = cast(list[tuple], asset_id_res)[0][0]
-                graph.add((asset_id, DEFAULT_NAMESPACE.sequence, sequence_id))
+    _item_type = DEFAULT_NAMESPACE.Sequence
+    _default_attribute = DEFAULT_NAMESPACE.assetId
+    _connection_type = DEFAULT_NAMESPACE.sequence


-class AssetFileConnector(BaseTransformer):
+class AssetFileConnector(BaseAssetConnector):
     description: str = "Connects assets to files, thus forming bi-directional connection"
     _use_only_once: bool = True
     _need_changes = frozenset(
@@ -170,39 +140,12 @@ class AssetFileConnector(BaseTransformer):
             str(extractors.FilesExtractor.__name__),
         }
     )
-    _asset_template: str = """SELECT ?asset_id WHERE {{
-        <{file_id}> <{asset_prop}> ?asset_id .
-        ?asset_id a <{asset_type}>}}"""
-
-    def __init__(
-        self,
-        asset_type: URIRef | None = None,
-        file_type: URIRef | None = None,
-        asset_prop: URIRef | None = None,
-    ):
-        self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
-        self.file_type = file_type or DEFAULT_NAMESPACE.File
-        self.asset_prop = asset_prop or DEFAULT_NAMESPACE.asset
-
-    def transform(self, graph: Graph) -> None:
-        for sequency_id_result in graph.query(f"SELECT DISTINCT ?file_id WHERE {{?file_id a <{self.file_type}>}}"):
-            file_id: URIRef = cast(tuple, sequency_id_result)[0]
-
-            if assets_id_res := list(
-                graph.query(
-                    self._asset_template.format(
-                        file_id=file_id,
-                        asset_prop=self.asset_prop,
-                        asset_type=self.asset_type,
-                    )
-                )
-            ):
-                # files can be connected to multiple assets in the graph
-                for (asset_id,) in cast(list[tuple], assets_id_res):
-                    graph.add((asset_id, DEFAULT_NAMESPACE.file, file_id))
+    _item_type = DEFAULT_NAMESPACE.File
+    _default_attribute = DEFAULT_NAMESPACE.assetIds
+    _connection_type = DEFAULT_NAMESPACE.file


-class AssetEventConnector(BaseTransformer):
+class AssetEventConnector(BaseAssetConnector):
     description: str = "Connects assets to events, thus forming bi-directional connection"
     _use_only_once: bool = True
     _need_changes = frozenset(
@@ -211,36 +154,9 @@ class AssetEventConnector(BaseTransformer):
             str(extractors.EventsExtractor.__name__),
         }
     )
-    _asset_template: str = """SELECT ?asset_id WHERE {{
-        <{event_id}> <{asset_prop}> ?asset_id .
-        ?asset_id a <{asset_type}>}}"""
-
-    def __init__(
-        self,
-        asset_type: URIRef | None = None,
-        event_type: URIRef | None = None,
-        asset_prop: URIRef | None = None,
-    ):
-        self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
-        self.event_type = event_type or DEFAULT_NAMESPACE.Event
-        self.asset_prop = asset_prop or DEFAULT_NAMESPACE.asset
-
-    def transform(self, graph: Graph) -> None:
-        for event_id_result in graph.query(f"SELECT DISTINCT ?event_id WHERE {{?event_id a <{self.event_type}>}}"):
-            event_id: URIRef = cast(tuple, event_id_result)[0]
-
-            if assets_id_res := list(
-                graph.query(
-                    self._asset_template.format(
-                        event_id=event_id,
-                        asset_prop=self.asset_prop,
-                        asset_type=self.asset_type,
-                    )
-                )
-            ):
-                # files can be connected to multiple assets in the graph
-                for (asset_id,) in cast(list[tuple], assets_id_res):
-                    graph.add((asset_id, DEFAULT_NAMESPACE.event, event_id))
+    _item_type = DEFAULT_NAMESPACE.Event
+    _default_attribute = DEFAULT_NAMESPACE.assetIds
+    _connection_type = DEFAULT_NAMESPACE.event


 class AssetRelationshipConnector(BaseTransformer):
@@ -271,9 +187,9 @@ class AssetRelationshipConnector(BaseTransformer):
     ):
         self.asset_type = asset_type or DEFAULT_NAMESPACE.Asset
         self.relationship_type = relationship_type or DEFAULT_NAMESPACE.Relationship
-        self.relationship_source_xid_prop = relationship_source_xid_prop or DEFAULT_NAMESPACE.source_external_id
-        self.relationship_target_xid_prop = relationship_target_xid_prop or DEFAULT_NAMESPACE.target_external_id
-        self.asset_xid_property = asset_xid_property or DEFAULT_NAMESPACE.external_id
+        self.relationship_source_xid_prop = relationship_source_xid_prop or DEFAULT_NAMESPACE.sourceExternalId
+        self.relationship_target_xid_prop = relationship_target_xid_prop or DEFAULT_NAMESPACE.targetExternalId
+        self.asset_xid_property = asset_xid_property or DEFAULT_NAMESPACE.externalId

     def transform(self, graph: Graph) -> None:
         for relationship_id_result in graph.query(
@@ -338,7 +254,7 @@ class RelationshipToSchemaTransformer(BaseTransformer):
         self._namespace = namespace

     _NOT_PROPERTIES: frozenset[str] = frozenset(
-        {"source_external_id", "target_external_id", "external_id", "source_type", "target_type"}
+        {"sourceExternalId", "targetExternalId", "externalId", "sourceType", "targetType"}
     )
     _RELATIONSHIP_NODE_TYPES: tuple[str, ...] = tuple(["Asset", "Event", "File", "Sequence", "TimeSeries"])
     description = "Replaces relationships with a schema"
@@ -350,8 +266,8 @@ class RelationshipToSchemaTransformer(BaseTransformer):
 SELECT (COUNT(?instance) AS ?instanceCount)
 WHERE {{
     ?instance a classic:Relationship .
-    ?instance classic:source_type classic:{source_type} .
-    ?instance classic:target_type classic:{target_type} .
+    ?instance classic:sourceType classic:{source_type} .
+    ?instance classic:targetType classic:{target_type} .
 }}"""

     _instances = """PREFIX classic: <{namespace}>
@@ -359,15 +275,15 @@ WHERE {{
 SELECT ?instance
 WHERE {{
     ?instance a classic:Relationship .
-    ?instance classic:source_type classic:{source_type} .
-    ?instance classic:target_type classic:{target_type} .
+    ?instance classic:sourceType classic:{source_type} .
+    ?instance classic:targetType classic:{target_type} .
 }}"""
     _lookup_entity_query = """PREFIX classic: <{namespace}>

 SELECT ?entity
 WHERE {{
     ?entity a classic:{entity_type} .
-    ?entity classic:external_id "{external_id}" .
+    ?entity classic:externalId "{external_id}" .
 }}"""

     def transform(self, graph: Graph) -> None:
@@ -393,8 +309,8 @@ WHERE {{
         object_by_predicates = cast(
             dict[str, URIRef | Literal], {remove_namespace_from_uri(row[1]): row[2] for row in result}
         )
-        source_external_id = cast(URIRef, object_by_predicates["source_external_id"])
-        target_source_id = cast(URIRef, object_by_predicates["target_external_id"])
+        source_external_id = cast(URIRef, object_by_predicates["sourceExternalId"])
+        target_source_id = cast(URIRef, object_by_predicates["targetExternalId"])
         try:
             source_id = self._lookup_entity(graph, source_type, source_external_id)
         except ValueError:
@@ -405,7 +321,7 @@ WHERE {{
         except ValueError:
             warnings.warn(ResourceNotFoundWarning(target_source_id, "class", str(instance_id), "class"), stacklevel=2)
             return None
-        external_id = str(object_by_predicates["external_id"])
+        external_id = str(object_by_predicates["externalId"])
         # If there is properties on the relationship, we create a new intermediate node
         self._create_node(graph, object_by_predicates, external_id, source_id, target_id, self._predicate(target_type))

@@ -431,7 +347,7 @@ WHERE {{
         predicate: URIRef,
     ) -> None:
         """Creates a new intermediate node for the relationship with properties."""
-        # Create new node
+        # Create the entity with the properties
         instance_id = self._namespace[external_id]
         graph.add((instance_id, RDF.type, self._namespace["Edge"]))
         for prop_name, object_ in objects_by_predicates.items():
@@ -439,9 +355,20 @@ WHERE {{
                 continue
             graph.add((instance_id, self._namespace[prop_name], object_))

-        # Connect the new node to the source and target nodes
-        graph.add((source_id, predicate, instance_id))
-        graph.add((instance_id, self._namespace["end_node"], target_id))
+        # Target and Source IDs will always be a combination of Asset, Sequence, Event, TimeSeries, and File.
+        # If we assume source ID is an asset and target ID is a time series, then
+        # before we had relationship pointing to both: timeseries <- relationship -> asset
+        # After, we want asset -> timeseries, and asset.edgeSource -> Edge
+        # and the new edge will point to the asset and the timeseries through startNode and endNode
+
+        # Link the two entities directly,
+        graph.add((source_id, predicate, target_id))
+        # Create the new edge
+        graph.add((instance_id, self._namespace["startNode"], source_id))
+        graph.add((instance_id, self._namespace["endNode"], target_id))
+
+        # Link the source to the edge properties
+        graph.add((source_id, self._namespace["edgeSource"], instance_id))

     def _predicate(self, target_type: str) -> URIRef:
         return self._namespace[f"relationship{target_type.capitalize()}"]
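
To make the new shape concrete: for a classic Relationship from an Asset to a TimeSeries that carries extra properties, the transformer now produces triples along the following lines. This is an illustrative sketch only, with placeholder URIs; "ns" stands for the transformer's namespace.

from rdflib import RDF, Graph, Namespace

ns = Namespace("http://example.org/classic/")  # placeholder for the transformer namespace
graph = Graph()
asset_id, timeseries_id, edge_id = ns.asset1, ns.ts1, ns.rel1

graph.add((asset_id, ns["relationshipTimeseries"], timeseries_id))  # direct link between the two entities
graph.add((edge_id, RDF.type, ns["Edge"]))                          # intermediate node holding the relationship properties
graph.add((edge_id, ns["startNode"], asset_id))
graph.add((edge_id, ns["endNode"], timeseries_id))
graph.add((asset_id, ns["edgeSource"], edge_id))                    # the asset points to the edge properties
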
@@ -123,4 +123,4 @@ class PruneDanglingNodes(BaseTransformer):
         for node in nodes_without_neighbours:
             # Remove node and its property triples in the graph
             if isinstance(node, ResultRow):
-                graph.remove(triple=(node["subject"], None, None))
+                graph.remove((node["subject"], None, None))
@@ -49,7 +49,7 @@ class AddSelfReferenceProperty(BaseTransformer):
             )

             traversal = SingleProperty.from_string(
-                class_=property_.class_.id,
+                class_=property_.view.id,
                 property_=f"{self.rules.metadata.prefix}:{property_.property_}",
             )

@@ -210,7 +210,7 @@ class NeatError(NeatIssue, Exception):
     """This is the base class for all exceptions (errors) used in Neat."""

     @classmethod
-    def from_pydantic_errors(cls, errors: list[ErrorDetails], **kwargs) -> "list[NeatError]":
+    def from_errors(cls, errors: "list[ErrorDetails | NeatError]", **kwargs) -> "list[NeatError]":
         """Convert a list of pydantic errors to a list of Error instances.

         This is intended to be overridden in subclasses to handle specific error types.
@@ -219,24 +219,36 @@ class NeatError(NeatIssue, Exception):
         read_info_by_sheet = kwargs.get("read_info_by_sheet")

         for error in errors:
-            if error["type"] == "is_instance_of" and error["loc"][1] == "is-instance[SheetList]":
+            if (
+                isinstance(error, dict)
+                and error["type"] == "is_instance_of"
+                and error["loc"][1] == "is-instance[SheetList]"
+            ):
                 # Skip the error for SheetList, as it is not relevant for the user. This is an
                 # internal class used to have helper methods for a lists as .to_pandas()
                 continue
-            ctx = error.get("ctx")
-            if isinstance(ctx, dict) and isinstance(multi_error := ctx.get("error"), MultiValueError):
+            neat_error: NeatError | None = None
+            if isinstance(error, dict) and isinstance(ctx := error.get("ctx"), dict) and "error" in ctx:
+                neat_error = ctx["error"]
+            elif isinstance(error, NeatError | MultiValueError):
+                neat_error = error
+
+            if isinstance(neat_error, MultiValueError):
                 if read_info_by_sheet:
-                    for caught_error in multi_error.errors:
+                    for caught_error in neat_error.errors:
                         cls._adjust_row_numbers(caught_error, read_info_by_sheet)  # type: ignore[arg-type]
-                all_errors.extend(multi_error.errors)  # type: ignore[arg-type]
-            elif isinstance(ctx, dict) and isinstance(single_error := ctx.get("error"), NeatError):
+                all_errors.extend(neat_error.errors)  # type: ignore[arg-type]
+            elif isinstance(neat_error, NeatError):
                 if read_info_by_sheet:
-                    cls._adjust_row_numbers(single_error, read_info_by_sheet)
-                all_errors.append(single_error)
-            elif len(error["loc"]) >= 4 and read_info_by_sheet:
+                    cls._adjust_row_numbers(neat_error, read_info_by_sheet)
+                all_errors.append(neat_error)
+            elif isinstance(error, dict) and len(error["loc"]) >= 4 and read_info_by_sheet:
                 all_errors.append(RowError.from_pydantic_error(error, read_info_by_sheet))
-            else:
+            elif isinstance(error, dict):
                 all_errors.append(DefaultPydanticError.from_pydantic_error(error))
+            else:
+                # This is unreachable. However, in case it turns out to be reachable, we want to know about it.
+                raise ValueError(f"Unsupported error type: {error}")
         return all_errors

     @staticmethod
@@ -511,13 +523,10 @@ def catch_issues(
     try:
         yield future_result
     except ValidationError as e:
-        issues.extend(error_cls.from_pydantic_errors(e.errors(), **(error_args or {})))
-        future_result._result = "failure"
-    except MultiValueError as e:
-        issues.extend(e.errors)
+        issues.extend(error_cls.from_errors(e.errors(), **(error_args or {})))  # type: ignore[arg-type]
         future_result._result = "failure"
-    except NeatError as e:
-        issues.append(e)
+    except (NeatError, MultiValueError) as e:
+        issues.extend(error_cls.from_errors([e], **(error_args or {})))  # type: ignore[arg-type, list-item]
         future_result._result = "failure"
     else:
         future_result._result = "success"
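
For context, the renamed from_errors classmethod now accepts both pydantic ErrorDetails dicts and NeatError/MultiValueError instances, which is what lets catch_issues route every exception type through a single call. A minimal sketch of the dict path follows; the ExampleModel class is invented for illustration, and calling from_errors on the NeatError base class directly is an assumption based on the hunk above.

from pydantic import BaseModel, ValidationError

from cognite.neat._issues._base import NeatError


class ExampleModel(BaseModel):  # hypothetical model, not part of neat
    space: str
    version: str


try:
    ExampleModel(space="my_space")  # "version" is missing, so pydantic raises
except ValidationError as e:
    issues = NeatError.from_errors(e.errors())  # returns a list of NeatError instances
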
@@ -2,6 +2,7 @@ from cognite.neat._issues._base import DefaultPydanticError, NeatError, RowError

 from ._external import (
     AuthorizationError,
+    CDFMissingClientError,
     FileMissingRequiredFieldError,
     FileNotAFileError,
     FileNotFoundNeatError,
@@ -20,7 +21,7 @@ from ._properties import (
 )
 from ._resources import (
     ResourceChangedError,
-    ResourceConvertionError,
+    ResourceConversionError,
     ResourceCreationError,
     ResourceDuplicatedError,
     ResourceError,
@@ -58,7 +59,7 @@ __all__ = [
     "ResourceError",
     "ResourceNotDefinedError",
     "ResourceMissingIdentifierError",
-    "ResourceConvertionError",
+    "ResourceConversionError",
     "WorkflowConfigurationNotSetError",
     "WorkFlowMissingDataError",
     "WorkflowStepNotInitializedError",
@@ -70,6 +71,7 @@ __all__ = [
     "RowError",
     "NeatTypeError",
     "ReversedConnectionNotFeasibleError",
+    "CDFMissingClientError",
 ]

 _NEAT_ERRORS_BY_NAME = {error.__name__: error for error in _get_subclasses(NeatError, include_base=True)}
@@ -65,3 +65,10 @@ class FileNotAFileError(NeatError, FileNotFoundError):

     fix = "Make sure to provide a valid file"
     filepath: Path
+
+
+@dataclass(unsafe_hash=True)
+class CDFMissingClientError(NeatError, RuntimeError):
+    """CDF client is required: {reason}"""
+
+    reason: str
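
The new error follows the usual Neat dataclass-error pattern. A hedged sketch of how it might be raised (illustrative only; the surrounding logic and the assumption that reason is the only required argument are not taken from the package):

from cognite.neat._issues.errors import CDFMissingClientError

client = None  # stands in for an optional CogniteClient
if client is None:
    raise CDFMissingClientError(reason="reading the data model from CDF requires a CogniteClient")
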
@@ -34,14 +34,9 @@ class PropertyTypeNotSupportedError(PropertyError[T_Identifier]):

 @dataclass(unsafe_hash=True)
 class ReversedConnectionNotFeasibleError(PropertyError[T_Identifier]):
-    """The {resource_type} {property_name} with identifier {identifier} of the view {target_view_id} cannot be made
-    since view {source_view_id} does not have direct connection {direct_connection} defined,
-    or {direct_connection} value type is not {target_view_id}
-    """
+    """The {resource_type} {identifier}.{property_name} cannot be created: {reason}"""

-    target_view_id: str
-    source_view_id: str
-    direct_connection: str
+    reason: str


 # This is a generic error that should be used sparingly
@@ -64,7 +64,7 @@ class ResourceNotDefinedError(ResourceError[T_Identifier]):


 @dataclass(unsafe_hash=True)
-class ResourceConvertionError(ResourceError, ValueError):
+class ResourceConversionError(ResourceError, ValueError):
     """Failed to convert the {resource_type} {identifier} to {target_format}: {reason}"""

     fix = "Check the error message and correct the rules."
@@ -6,6 +6,8 @@ from cognite.neat._issues._base import DefaultWarning, NeatWarning, _get_subclas

 from . import user_modeling
 from ._external import (
+    CDFAuthWarning,
+    CDFMaxIterationsWarning,
     FileItemNotSupportedWarning,
     FileMissingRequiredFieldWarning,
     FileReadWarning,
@@ -26,6 +28,8 @@ from ._models import (
 from ._properties import (
     PropertyDefinitionDuplicatedWarning,
     PropertyNotFoundWarning,
+    PropertyOverwritingWarning,
+    PropertySkippedWarning,
     PropertyTypeNotSupportedWarning,
     PropertyValueTypeUndefinedWarning,
 )
@@ -52,6 +56,8 @@ __all__ = [
     "PropertyTypeNotSupportedWarning",
     "PropertyNotFoundWarning",
     "PropertyValueTypeUndefinedWarning",
+    "PropertyOverwritingWarning",
+    "PropertySkippedWarning",
     "ResourceNeatWarning",
     "ResourcesDuplicatedWarning",
     "RegexViolationWarning",
@@ -64,7 +70,9 @@ __all__ = [
     "NotSupportedViewContainerLimitWarning",
     "NotSupportedHasDataFilterLimitWarning",
     "UndefinedViewWarning",
+    "CDFAuthWarning",
     "user_modeling",
+    "CDFMaxIterationsWarning",
 ]

 _NEAT_WARNINGS_BY_NAME = {warning.__name__: warning for warning in _get_subclasses(NeatWarning, include_base=True)}
@@ -38,3 +38,19 @@ class FileItemNotSupportedWarning(NeatWarning):

     item: str
     filepath: Path
+
+
+@dataclass(unsafe_hash=True)
+class CDFAuthWarning(NeatWarning):
+    """Failed to {action} due to {reason}"""
+
+    action: str
+    reason: str
+
+
+@dataclass(unsafe_hash=True)
+class CDFMaxIterationsWarning(NeatWarning):
+    """The maximum number of iterations ({max_iterations}) has been reached. {message}"""
+
+    message: str
+    max_iterations: int
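
The new CDF warnings are issued with the same warnings.warn pattern used elsewhere in neat (compare the ResourceNotFoundWarning call in the transformer hunk above). A hedged sketch; the action, reason, and iteration values are invented for illustration:

import warnings

from cognite.neat._issues.warnings import CDFAuthWarning, CDFMaxIterationsWarning

warnings.warn(CDFAuthWarning(action="list spaces", reason="missing dataModels:read capability"), stacklevel=2)
warnings.warn(
    CDFMaxIterationsWarning(message="Stopping the asset hierarchy traversal early.", max_iterations=10),
    stacklevel=2,
)
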
@@ -54,3 +54,19 @@ class PropertyValueTypeUndefinedWarning(PropertyWarning[T_Identifier]):

     default_action: str
     recommended_action: str | None = None
+
+
+@dataclass(unsafe_hash=True)
+class PropertyOverwritingWarning(PropertyWarning[T_Identifier]):
+    """Overwriting the {overwriting} for {property_name} in the {resource_type}
+    with identifier {identifier}."""
+
+    overwriting: tuple[str, ...]
+
+
+@dataclass(unsafe_hash=True)
+class PropertySkippedWarning(PropertyWarning[T_Identifier]):
+    """The {resource_type} with identifier {identifier} has a property {property_name}
+    which is skipped. {reason}."""
+
+    reason: str