cognite-neat 0.99.0__py3-none-any.whl → 0.99.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- cognite/neat/_client/_api/data_modeling_loaders.py +77 -4
- cognite/neat/_client/_api/schema.py +63 -2
- cognite/neat/_client/data_classes/schema.py +2 -348
- cognite/neat/_constants.py +27 -4
- cognite/neat/_graph/extractors/_classic_cdf/_classic.py +5 -5
- cognite/neat/_graph/loaders/_rdf2dms.py +2 -2
- cognite/neat/_graph/transformers/_classic_cdf.py +24 -13
- cognite/neat/_issues/_base.py +26 -17
- cognite/neat/_issues/errors/__init__.py +4 -2
- cognite/neat/_issues/errors/_external.py +7 -0
- cognite/neat/_issues/errors/_properties.py +2 -7
- cognite/neat/_issues/errors/_resources.py +1 -1
- cognite/neat/_issues/warnings/__init__.py +4 -2
- cognite/neat/_issues/warnings/_external.py +9 -1
- cognite/neat/_issues/warnings/_resources.py +26 -2
- cognite/neat/_issues/warnings/user_modeling.py +4 -4
- cognite/neat/_rules/_constants.py +2 -6
- cognite/neat/_rules/exporters/_rules2dms.py +4 -6
- cognite/neat/_rules/importers/__init__.py +1 -3
- cognite/neat/_rules/importers/_base.py +1 -1
- cognite/neat/_rules/importers/_dms2rules.py +3 -25
- cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
- cognite/neat/_rules/importers/_rdf/_base.py +34 -11
- cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
- cognite/neat/_rules/importers/_rdf/_inference2rules.py +18 -2
- cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
- cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
- cognite/neat/_rules/models/dms/__init__.py +2 -0
- cognite/neat/_rules/models/dms/_exporter.py +32 -30
- cognite/neat/_rules/models/dms/_rules.py +3 -45
- cognite/neat/_rules/models/dms/_validation.py +389 -122
- cognite/neat/_rules/models/information/__init__.py +2 -0
- cognite/neat/_rules/models/information/_rules.py +0 -59
- cognite/neat/_rules/models/information/_validation.py +9 -9
- cognite/neat/_rules/models/mapping/_classic2core.py +1 -1
- cognite/neat/_rules/models/mapping/_classic2core.yaml +8 -4
- cognite/neat/_rules/transformers/_pipelines.py +1 -1
- cognite/neat/_rules/transformers/_verification.py +29 -4
- cognite/neat/_session/_base.py +16 -41
- cognite/neat/_session/_prepare.py +6 -5
- cognite/neat/_session/_to.py +5 -2
- cognite/neat/_session/exceptions.py +4 -0
- cognite/neat/_utils/rdf_.py +6 -4
- cognite/neat/_version.py +1 -1
- cognite/neat/_workflows/steps/lib/current/rules_exporter.py +0 -88
- cognite/neat/_workflows/steps/lib/current/rules_importer.py +2 -16
- cognite/neat/_workflows/steps/lib/current/rules_validator.py +3 -5
- {cognite_neat-0.99.0.dist-info → cognite_neat-0.99.1.dist-info}/METADATA +1 -1
- {cognite_neat-0.99.0.dist-info → cognite_neat-0.99.1.dist-info}/RECORD +52 -60
- cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
- cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
- cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
- cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
- {cognite_neat-0.99.0.dist-info → cognite_neat-0.99.1.dist-info}/LICENSE +0 -0
- {cognite_neat-0.99.0.dist-info → cognite_neat-0.99.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.99.0.dist-info → cognite_neat-0.99.1.dist-info}/entry_points.txt +0 -0
cognite/neat/_graph/extractors/_classic_cdf/_classic.py
CHANGED

@@ -7,9 +7,9 @@ from cognite.client import CogniteClient
 from cognite.client.exceptions import CogniteAPIError
 from rdflib import Namespace

-from cognite.neat._constants import
+from cognite.neat._constants import CLASSIC_CDF_NAMESPACE
 from cognite.neat._graph.extractors._base import BaseExtractor
-from cognite.neat._issues.warnings import
+from cognite.neat._issues.warnings import CDFAuthWarning
 from cognite.neat._shared import Triple
 from cognite.neat._utils.collection_ import chunker
 from cognite.neat._utils.rdf_ import remove_namespace_from_uri

@@ -98,7 +98,7 @@ class ClassicGraphExtractor(BaseExtractor):
             raise ValueError("Exactly one of data_set_external_id or root_asset_external_id must be set.")
         self._root_asset_external_id = root_asset_external_id
         self._data_set_external_id = data_set_external_id
-        self._namespace = namespace or
+        self._namespace = namespace or CLASSIC_CDF_NAMESPACE
         self._extractor_args = dict(namespace=self._namespace, unpack_metadata=False, as_write=True, camel_case=True)

         self._source_external_ids_by_type: dict[InstanceIdPrefix, set[str]] = defaultdict(set)

@@ -117,12 +117,12 @@ class ClassicGraphExtractor(BaseExtractor):
         try:
             yield from self._extract_labels()
         except CogniteAPIError as e:
-            warnings.warn(
+            warnings.warn(CDFAuthWarning("extract labels", str(e)), stacklevel=2)

         try:
             yield from self._extract_data_sets()
         except CogniteAPIError as e:
-            warnings.warn(
+            warnings.warn(CDFAuthWarning("extract data sets", str(e)), stacklevel=2)

     def _extract_core_start_nodes(self):
         for core_node in self._classic_node_types:
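The hunks above change the extractor to degrade CDF permission failures into warnings instead of aborting the extraction. A minimal standalone sketch of that pattern, using a hypothetical stand-in class rather than neat's actual CDFAuthWarning and CogniteAPIError:

```python
import warnings


class CDFAuthWarningSketch(UserWarning):
    """Hypothetical stand-in for CDFAuthWarning, carrying an action and a reason."""

    def __init__(self, action: str, reason: str) -> None:
        super().__init__(f"Failed to {action} due to {reason}")


def extract_labels_safe(fetch_labels):
    """Yield labels if possible; emit a warning and continue empty-handed on failure."""
    try:
        return list(fetch_labels())
    except RuntimeError as error:  # stands in for cognite.client.exceptions.CogniteAPIError
        warnings.warn(CDFAuthWarningSketch("extract labels", str(error)), stacklevel=2)
        return []
```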
cognite/neat/_graph/loaders/_rdf2dms.py
CHANGED

@@ -19,7 +19,7 @@ from rdflib import RDF
 from cognite.neat._graph._tracking import LogTracker, Tracker
 from cognite.neat._issues import IssueList, NeatIssue, NeatIssueList
 from cognite.neat._issues.errors import (
-
+    ResourceConversionError,
     ResourceCreationError,
     ResourceDuplicatedError,
     ResourceRetrievalError,

@@ -88,7 +88,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
             data_model = rules.as_schema().as_read_model()
         except Exception as e:
             issues.append(
-
+                ResourceConversionError(
                     identifier=rules.metadata.as_identifier(),
                     resource_type="DMS Rules",
                     target_format="read DMS model",
cognite/neat/_graph/transformers/_classic_cdf.py
CHANGED

@@ -254,7 +254,7 @@ class RelationshipToSchemaTransformer(BaseTransformer):
         self._namespace = namespace

     _NOT_PROPERTIES: frozenset[str] = frozenset(
-        {"
+        {"sourceExternalId", "targetExternalId", "externalId", "sourceType", "targetType"}
     )
     _RELATIONSHIP_NODE_TYPES: tuple[str, ...] = tuple(["Asset", "Event", "File", "Sequence", "TimeSeries"])
     description = "Replaces relationships with a schema"

@@ -266,8 +266,8 @@ class RelationshipToSchemaTransformer(BaseTransformer):
     SELECT (COUNT(?instance) AS ?instanceCount)
     WHERE {{
         ?instance a classic:Relationship .
-        ?instance classic:
-        ?instance classic:
+        ?instance classic:sourceType classic:{source_type} .
+        ?instance classic:targetType classic:{target_type} .
     }}"""

     _instances = """PREFIX classic: <{namespace}>
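As an illustration of how a templated query like the `_count` template in the hunk above is meant to be used, the sketch below formats the template and runs it with rdflib. The namespace URI and the sample data are placeholders, not neat's CLASSIC_CDF_NAMESPACE.

```python
from rdflib import Graph

namespace = "http://example.org/classic/"
count_template = """PREFIX classic: <{namespace}>

SELECT (COUNT(?instance) AS ?instanceCount)
WHERE {{
    ?instance a classic:Relationship .
    ?instance classic:sourceType classic:{source_type} .
    ?instance classic:targetType classic:{target_type} .
}}"""

graph = Graph()
graph.parse(
    format="turtle",
    data=f"""@prefix classic: <{namespace}> .
classic:rel1 a classic:Relationship ;
    classic:sourceType classic:Asset ;
    classic:targetType classic:TimeSeries .""",
)

query = count_template.format(namespace=namespace, source_type="Asset", target_type="TimeSeries")
for row in graph.query(query):
    print(row.instanceCount)  # 1
```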
@@ -275,15 +275,15 @@
 SELECT ?instance
 WHERE {{
     ?instance a classic:Relationship .
-    ?instance classic:
-    ?instance classic:
+    ?instance classic:sourceType classic:{source_type} .
+    ?instance classic:targetType classic:{target_type} .
 }}"""
     _lookup_entity_query = """PREFIX classic: <{namespace}>

 SELECT ?entity
 WHERE {{
     ?entity a classic:{entity_type} .
-    ?entity classic:
+    ?entity classic:externalId "{external_id}" .
 }}"""

     def transform(self, graph: Graph) -> None:

@@ -309,8 +309,8 @@ WHERE {{
         object_by_predicates = cast(
             dict[str, URIRef | Literal], {remove_namespace_from_uri(row[1]): row[2] for row in result}
         )
-        source_external_id = cast(URIRef, object_by_predicates["
-        target_source_id = cast(URIRef, object_by_predicates["
+        source_external_id = cast(URIRef, object_by_predicates["sourceExternalId"])
+        target_source_id = cast(URIRef, object_by_predicates["targetExternalId"])
         try:
             source_id = self._lookup_entity(graph, source_type, source_external_id)
         except ValueError:

@@ -321,7 +321,7 @@ WHERE {{
         except ValueError:
             warnings.warn(ResourceNotFoundWarning(target_source_id, "class", str(instance_id), "class"), stacklevel=2)
             return None
-        external_id = str(object_by_predicates["
+        external_id = str(object_by_predicates["externalId"])
         # If there is properties on the relationship, we create a new intermediate node
         self._create_node(graph, object_by_predicates, external_id, source_id, target_id, self._predicate(target_type))

@@ -347,7 +347,7 @@ WHERE {{
         predicate: URIRef,
     ) -> None:
         """Creates a new intermediate node for the relationship with properties."""
-        # Create
+        # Create the entity with the properties
         instance_id = self._namespace[external_id]
         graph.add((instance_id, RDF.type, self._namespace["Edge"]))
         for prop_name, object_ in objects_by_predicates.items():

@@ -355,9 +355,20 @@ WHERE {{
                 continue
             graph.add((instance_id, self._namespace[prop_name], object_))

-        #
-
-
+        # Target and Source IDs will always be a combination of Asset, Sequence, Event, TimeSeries, and File.
+        # If we assume source ID is an asset and target ID is a time series, then
+        # before we had relationship pointing to both: timeseries <- relationship -> asset
+        # After, we want asset -> timeseries, and asset.edgeSource -> Edge
+        # and the new edge will point to the asset and the timeseries through startNode and endNode
+
+        # Link the two entities directly,
+        graph.add((source_id, predicate, target_id))
+        # Create the new edge
+        graph.add((instance_id, self._namespace["startNode"], source_id))
+        graph.add((instance_id, self._namespace["endNode"], target_id))
+
+        # Link the source to the edge properties
+        graph.add((source_id, self._namespace["edgeSource"], instance_id))

     def _predicate(self, target_type: str) -> URIRef:
         return self._namespace[f"relationship{target_type.capitalize()}"]
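The comments added in the last hunk describe the shape of the rewritten graph. Below is a small, self-contained rdflib sketch of the triples the transformer ends up with for an asset-to-time-series relationship; the namespace and identifiers are placeholders, not values from neat.

```python
from rdflib import RDF, Graph, Namespace

CLASSIC = Namespace("http://example.org/classic/")  # placeholder, not CLASSIC_CDF_NAMESPACE

graph = Graph()
asset, timeseries, edge = CLASSIC["myAsset"], CLASSIC["myTimeSeries"], CLASSIC["myRelationship"]

# Direct link that replaces the relationship: asset -> timeseries.
graph.add((asset, CLASSIC["relationshipTimeseries"], timeseries))

# The relationship's own properties live on a new Edge node that records both endpoints.
graph.add((edge, RDF.type, CLASSIC["Edge"]))
graph.add((edge, CLASSIC["startNode"], asset))
graph.add((edge, CLASSIC["endNode"], timeseries))

# The source entity keeps a pointer to the edge properties.
graph.add((asset, CLASSIC["edgeSource"], edge))

print(len(graph))  # 5 triples
```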
cognite/neat/_issues/_base.py
CHANGED

@@ -210,7 +210,7 @@ class NeatError(NeatIssue, Exception):
     """This is the base class for all exceptions (errors) used in Neat."""

     @classmethod
-    def
+    def from_errors(cls, errors: "list[ErrorDetails | NeatError]", **kwargs) -> "list[NeatError]":
         """Convert a list of pydantic errors to a list of Error instances.

         This is intended to be overridden in subclasses to handle specific error types.

@@ -219,24 +219,36 @@ class NeatError(NeatIssue, Exception):
         read_info_by_sheet = kwargs.get("read_info_by_sheet")

         for error in errors:
-            if
+            if (
+                isinstance(error, dict)
+                and error["type"] == "is_instance_of"
+                and error["loc"][1] == "is-instance[SheetList]"
+            ):
                 # Skip the error for SheetList, as it is not relevant for the user. This is an
                 # internal class used to have helper methods for a lists as .to_pandas()
                 continue
-
-            if isinstance(
+            neat_error: NeatError | None = None
+            if isinstance(error, dict) and isinstance(ctx := error.get("ctx"), dict) and "error" in ctx:
+                neat_error = ctx["error"]
+            elif isinstance(error, NeatError | MultiValueError):
+                neat_error = error
+
+            if isinstance(neat_error, MultiValueError):
                 if read_info_by_sheet:
-                    for caught_error in
+                    for caught_error in neat_error.errors:
                         cls._adjust_row_numbers(caught_error, read_info_by_sheet)  # type: ignore[arg-type]
-                all_errors.extend(
-            elif isinstance(
+                all_errors.extend(neat_error.errors)  # type: ignore[arg-type]
+            elif isinstance(neat_error, NeatError):
                 if read_info_by_sheet:
-                    cls._adjust_row_numbers(
-                all_errors.append(
-            elif len(error["loc"]) >= 4 and read_info_by_sheet:
+                    cls._adjust_row_numbers(neat_error, read_info_by_sheet)
+                all_errors.append(neat_error)
+            elif isinstance(error, dict) and len(error["loc"]) >= 4 and read_info_by_sheet:
                 all_errors.append(RowError.from_pydantic_error(error, read_info_by_sheet))
-
+            elif isinstance(error, dict):
                 all_errors.append(DefaultPydanticError.from_pydantic_error(error))
+            else:
+                # This is unreachable. However, in case it turns out to be reachable, we want to know about it.
+                raise ValueError(f"Unsupported error type: {error}")
         return all_errors

     @staticmethod
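For context, `from_errors` consumes the list-of-dicts shape that pydantic's `ValidationError.errors()` produces and wraps each entry in an error instance. A tiny sketch of that input, assuming pydantic v2 and a made-up model:

```python
from pydantic import BaseModel, ValidationError


class Metadata(BaseModel):
    space: str
    version: str


try:
    Metadata(space="my_space")  # "version" is missing
except ValidationError as exc:
    # Each entry is a dict with "type", "loc", "msg", ...; from_errors walks such a list
    # and converts each entry into an error object.
    for error in exc.errors():
        print(error["type"], error["loc"], error["msg"])
```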
@@ -511,13 +523,10 @@ def catch_issues(
     try:
         yield future_result
     except ValidationError as e:
-        issues.extend(error_cls.
-        future_result._result = "failure"
-    except MultiValueError as e:
-        issues.extend(e.errors)
+        issues.extend(error_cls.from_errors(e.errors(), **(error_args or {})))  # type: ignore[arg-type]
         future_result._result = "failure"
-    except NeatError as e:
-        issues.
+    except (NeatError, MultiValueError) as e:
+        issues.extend(error_cls.from_errors([e], **(error_args or {})))  # type: ignore[arg-type, list-item]
         future_result._result = "failure"
     else:
         future_result._result = "success"
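The `catch_issues` change above folds `NeatError` and `MultiValueError` handling into one branch; the overall idea is a context manager that records failures in a list instead of letting them propagate. A simplified, standalone sketch of that idea (not neat's actual implementation):

```python
from contextlib import contextmanager


@contextmanager
def collect_issues(issues: list[Exception]):
    """Run a block and append any raised error to `issues` instead of propagating it."""
    try:
        yield
    except (ValueError, RuntimeError) as error:  # stands in for NeatError / MultiValueError
        issues.append(error)


issues: list[Exception] = []
with collect_issues(issues):
    raise ValueError("bad rule definition")

print(issues)  # [ValueError('bad rule definition')]
```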
cognite/neat/_issues/errors/__init__.py
CHANGED

@@ -2,6 +2,7 @@ from cognite.neat._issues._base import DefaultPydanticError, NeatError, RowError

 from ._external import (
     AuthorizationError,
+    CDFMissingClientError,
     FileMissingRequiredFieldError,
     FileNotAFileError,
     FileNotFoundNeatError,

@@ -20,7 +21,7 @@ from ._properties import (
 )
 from ._resources import (
     ResourceChangedError,
-
+    ResourceConversionError,
     ResourceCreationError,
     ResourceDuplicatedError,
     ResourceError,

@@ -58,7 +59,7 @@ __all__ = [
     "ResourceError",
     "ResourceNotDefinedError",
     "ResourceMissingIdentifierError",
-    "
+    "ResourceConversionError",
     "WorkflowConfigurationNotSetError",
     "WorkFlowMissingDataError",
     "WorkflowStepNotInitializedError",

@@ -70,6 +71,7 @@ __all__ = [
     "RowError",
     "NeatTypeError",
     "ReversedConnectionNotFeasibleError",
+    "CDFMissingClientError",
 ]

 _NEAT_ERRORS_BY_NAME = {error.__name__: error for error in _get_subclasses(NeatError, include_base=True)}
cognite/neat/_issues/errors/_external.py
CHANGED

@@ -65,3 +65,10 @@ class FileNotAFileError(NeatError, FileNotFoundError):

     fix = "Make sure to provide a valid file"
     filepath: Path
+
+
+@dataclass(unsafe_hash=True)
+class CDFMissingClientError(NeatError, RuntimeError):
+    """CDF client is required: {reason}"""
+
+    reason: str
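A hedged sketch of how such an error would be raised by an operation that needs a CDF client; the function and its argument are hypothetical, and only the message template comes from the hunk above:

```python
class CDFMissingClientSketch(RuntimeError):
    """Stand-in for CDFMissingClientError, formatting the same message template."""

    def __init__(self, reason: str) -> None:
        super().__init__(f"CDF client is required: {reason}")


def publish_data_model(client=None):
    # Hypothetical guard: fail fast with a descriptive error when no client is configured.
    if client is None:
        raise CDFMissingClientSketch("writing a data model to CDF")
    ...
```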
cognite/neat/_issues/errors/_properties.py
CHANGED

@@ -34,14 +34,9 @@ class PropertyTypeNotSupportedError(PropertyError[T_Identifier]):

 @dataclass(unsafe_hash=True)
 class ReversedConnectionNotFeasibleError(PropertyError[T_Identifier]):
-    """The {resource_type} {
-    since view {source_view_id} does not have direct connection {direct_connection} defined,
-    or {direct_connection} value type is not {target_view_id}
-    """
+    """The {resource_type} {identifier}.{property_name} cannot be created: {reason}"""

-
-    source_view_id: str
-    direct_connection: str
+    reason: str


 # This is a generic error that should be used sparingly

cognite/neat/_issues/errors/_resources.py
CHANGED

@@ -64,7 +64,7 @@ class ResourceNotDefinedError(ResourceError[T_Identifier]):


 @dataclass(unsafe_hash=True)
-class
+class ResourceConversionError(ResourceError, ValueError):
     """Failed to convert the {resource_type} {identifier} to {target_format}: {reason}"""

     fix = "Check the error message and correct the rules."
cognite/neat/_issues/warnings/__init__.py
CHANGED

@@ -6,7 +6,8 @@ from cognite.neat._issues._base import DefaultWarning, NeatWarning, _get_subclasses

 from . import user_modeling
 from ._external import (
-
+    CDFAuthWarning,
+    CDFMaxIterationsWarning,
     FileItemNotSupportedWarning,
     FileMissingRequiredFieldWarning,
     FileReadWarning,

@@ -69,8 +70,9 @@ __all__ = [
     "NotSupportedViewContainerLimitWarning",
     "NotSupportedHasDataFilterLimitWarning",
     "UndefinedViewWarning",
-    "
+    "CDFAuthWarning",
     "user_modeling",
+    "CDFMaxIterationsWarning",
 ]

 _NEAT_WARNINGS_BY_NAME = {warning.__name__: warning for warning in _get_subclasses(NeatWarning, include_base=True)}
cognite/neat/_issues/warnings/_external.py
CHANGED

@@ -41,8 +41,16 @@ class FileItemNotSupportedWarning(NeatWarning):


 @dataclass(unsafe_hash=True)
-class
+class CDFAuthWarning(NeatWarning):
     """Failed to {action} due to {reason}"""

     action: str
     reason: str
+
+
+@dataclass(unsafe_hash=True)
+class CDFMaxIterationsWarning(NeatWarning):
+    """The maximum number of iterations ({max_iterations}) has been reached. {message}"""
+
+    message: str
+    max_iterations: int
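These warning classes pair a docstring template with dataclass fields. A standalone sketch of that idea follows; the rendering mechanism below is illustrative and is not how neat's NeatWarning is actually implemented:

```python
import warnings
from dataclasses import dataclass, fields


@dataclass(unsafe_hash=True)
class MaxIterationsWarningSketch(UserWarning):
    """The maximum number of iterations ({max_iterations}) has been reached. {message}"""

    message: str
    max_iterations: int

    def __str__(self) -> str:
        # Render the docstring template from the dataclass fields.
        values = {field.name: getattr(self, field.name) for field in fields(self)}
        return (self.__doc__ or "").format(**values)


warnings.warn(MaxIterationsWarningSketch("Stopping graph traversal.", 10), stacklevel=2)
```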
cognite/neat/_issues/warnings/_resources.py
CHANGED

@@ -24,6 +24,30 @@ class ResourceNotFoundWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
     referred_type: str


+@dataclass(unsafe_hash=True)
+class ResourceNotDefinedWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
+    """The {resource_type} {identifier} is not defined in the {location}"""
+
+    extra = "{column_name} {row_number} in {sheet_name}"
+    fix = "Define the {resource_type} {identifier} in {location}."
+
+    location: str
+    column_name: str | None = None
+    row_number: int | None = None
+    sheet_name: str | None = None
+
+
+@dataclass(unsafe_hash=True)
+class ResourceRedefinedWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
+    """The {resource_type} {identifier} feature {feature} is being redefine from {current_value} to {new_value}.
+    This will be ignored."""
+
+    fix = "Avoid redefinition {resource_type} features"
+    feature: str
+    current_value: str
+    new_value: str
+
+
 @dataclass(unsafe_hash=True)
 class ResourcesDuplicatedWarning(NeatWarning, Generic[T_Identifier]):
     """Duplicated {resource_type} with identifiers {resources} were found. {default_action}"""

@@ -37,12 +61,12 @@ class ResourcesDuplicatedWarning(NeatWarning, Generic[T_Identifier]):

 @dataclass(unsafe_hash=True)
 class ResourceRetrievalWarning(NeatWarning, Generic[T_Identifier]):
-    """Failed to retrieve {resource_type} with
+    """Failed to retrieve {resource_type} with identifier(s) {resources}. Continuing without
     these resources."""

     extra = "The error was: {error}"

-    fix = "Check the error."
+    fix = "Check the error and fix accordingly."

     resources: frozenset[T_Identifier]
     resource_type: ResourceType
cognite/neat/_issues/warnings/user_modeling.py
CHANGED

@@ -17,7 +17,7 @@ __all__ = [
     "HasDataFilterOnNoPropertiesViewWarning",
     "NodeTypeFilterOnParentViewWarning",
     "HasDataFilterOnViewWithReferencesWarning",
-    "
+    "ContainerPropertyLimitWarning",
     "NotNeatSupportedFilterWarning",
     "ParentInDifferentSpaceWarning",
 ]

@@ -89,15 +89,15 @@ class HasDataFilterOnViewWithReferencesWarning(UserModelingWarning):


 @dataclass(unsafe_hash=True)
-class
-    """The number of properties in the {
+class ContainerPropertyLimitWarning(UserModelingWarning):
+    """The number of properties in the {container_id} view is {count} which
     is more than the API limit {limit} properties.
     This can lead to performance issues.
     Reduce the number of properties in the view."""

     fix = "Reduce the number of properties in the view"

-
+    container_id: ContainerId
     count: int
     limit: int = DMS_CONTAINER_PROPERTY_SIZE_LIMIT
cognite/neat/_rules/_constants.py
CHANGED

@@ -106,13 +106,9 @@ VIEW_ID_COMPLIANCE_REGEX = (
 DMS_PROPERTY_ID_COMPLIANCE_REGEX = (
     rf"(?!^({'|'.join(get_reserved_words('property'))})$)" r"(^[a-zA-Z][a-zA-Z0-9_]{0,253}[a-zA-Z0-9]?$)"
 )
-CLASS_ID_COMPLIANCE_REGEX = (
-    rf"(?!^({'|'.join(get_reserved_words('class'))})$)" r"(^[a-zA-Z][a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
-)
+CLASS_ID_COMPLIANCE_REGEX = rf"(?!^({'|'.join(get_reserved_words('class'))})$)" r"(^[a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"

-INFORMATION_PROPERTY_ID_COMPLIANCE_REGEX = (
-    r"^(\*)|(?!^(Property|property)$)(^[a-zA-Z][a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
-)
+INFORMATION_PROPERTY_ID_COMPLIANCE_REGEX = r"^(\*)|(?!^(Property|property)$)(^[a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
 VERSION_COMPLIANCE_REGEX = r"^[a-zA-Z0-9]([.a-zA-Z0-9_-]{0,41}[a-zA-Z0-9])?$"

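To see what the relaxed identifier patterns accept, here is a small check against a simplified stand-in; the reserved-word list below is made up, while the character-class part mirrors the new CLASS_ID_COMPLIANCE_REGEX above, which no longer requires a leading letter:

```python
import re

RESERVED_CLASS_WORDS = ("class", "property")  # illustrative, not neat's full reserved list
CLASS_ID_REGEX = (
    rf"(?!^({'|'.join(RESERVED_CLASS_WORDS)})$)" r"(^[a-zA-Z0-9._-]{0,253}[a-zA-Z0-9]?$)"
)

for candidate in ("Pump", "my.class-01", "class", "01-starts-with-digit"):
    print(candidate, bool(re.match(CLASS_ID_REGEX, candidate)))
# Pump True, my.class-01 True, class False, 01-starts-with-digit True
```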
cognite/neat/_rules/exporters/_rules2dms.py
CHANGED

@@ -39,8 +39,6 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
             If set, only export components in the given spaces. Defaults to None which means all spaces.
         existing_handling (Literal["fail", "skip", "update", "force"], optional): How to handle existing components.
             Defaults to "update". See below for details.
-        export_pipeline (bool, optional): Whether to export the pipeline. Defaults to False. This means setting
-            up transformations, RAW databases and tables to populate the data model.
         instance_space (str, optional): The space to use for the instance. Defaults to None.
         suppress_warnings (bool, optional): Suppress warnings. Defaults to False.


@@ -58,14 +56,12 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         export_components: Component | Collection[Component] = "all",
         include_space: set[str] | None = None,
         existing_handling: Literal["fail", "skip", "update", "force"] = "update",
-        export_pipeline: bool = False,
         instance_space: str | None = None,
         suppress_warnings: bool = False,
     ):
         self.export_components = {export_components} if isinstance(export_components, str) else set(export_components)
         self.include_space = include_space
         self.existing_handling = existing_handling
-        self.export_pipeline = export_pipeline
         self.instance_space = instance_space
         self.suppress_warnings = suppress_warnings
         self._schema: DMSSchema | None = None

@@ -106,7 +102,8 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
         return exclude

     def export(self, rules: DMSRules) -> DMSSchema:
-
+        # We do not want to include CogniteCore/CogniteProcess Inudstries in the schema
+        return rules.as_schema(instance_space=self.instance_space, remove_cdf_spaces=True)

     def delete_from_cdf(
         self, rules: DMSRules, client: NeatClient, dry_run: bool = False, skip_space: bool = False

@@ -115,7 +112,8 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):

         # we need to reverse order in which we are picking up the items to delete
         # as they are sorted in the order of creation and we need to delete them in reverse order
-        for items
+        for items in reversed(to_export):
+            loader = client.loaders.get_loader(items)
             if skip_space and isinstance(items, SpaceApplyList):
                 continue
             item_ids = loader.get_ids(items)
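A small sketch of the ordering idea behind delete_from_cdf above: components are listed in the order they must be created, so deletion walks the same list in reverse to remove dependents before their dependencies. The component names here are illustrative.

```python
# Resources listed in the order they must be created.
creation_order = ["spaces", "containers", "views", "data models"]

# Deleting in reverse drops dependents first (data models before views, and so on).
for component in reversed(creation_order):
    print(f"deleting {component}")
```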
cognite/neat/_rules/importers/__init__.py
CHANGED

@@ -1,9 +1,7 @@
 from ._base import BaseImporter
 from ._dms2rules import DMSImporter
 from ._dtdl2rules import DTDLImporter
-from ._rdf
-from ._rdf._inference2rules import InferenceImporter
-from ._rdf._owl2rules import OWLImporter
+from ._rdf import IMFImporter, InferenceImporter, OWLImporter
 from ._spreadsheet2rules import ExcelImporter, GoogleSheetImporter
 from ._yaml2rules import YAMLImporter

cognite/neat/_rules/importers/_base.py
CHANGED

@@ -85,7 +85,7 @@ def _handle_issues(
     try:
         yield future_result
     except ValidationError as e:
-        issues.extend(error_cls.
+        issues.extend(error_cls.from_errors(e.errors(), **(error_args or {})))  # type: ignore[arg-type]
         future_result._result = "failure"
     else:
         future_result._result = "success"
cognite/neat/_rules/importers/_dms2rules.py
CHANGED

@@ -80,9 +80,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
         self.issue_list = IssueList(read_issues)
         self._all_containers_by_id = schema.containers.copy()
         self._all_views_by_id = schema.views.copy()
-        if schema.reference:
-            self._all_containers_by_id.update(schema.reference.containers.items())
-            self._all_views_by_id.update(schema.reference.views.items())

     def update_referenced_containers(self, containers: Iterable[dm.ContainerApply]) -> None:
         """Update the referenced containers. This is useful to add Cognite containers identified after the root schema

@@ -97,7 +94,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
         cls,
         client: NeatClient,
         data_model_id: DataModelIdentifier,
-        reference_model_id: DataModelIdentifier | None = None,
     ) -> "DMSImporter":
         """Create a DMSImporter ready to convert the given data model to rules.

@@ -111,7 +107,7 @@ class DMSImporter(BaseImporter[DMSInputRules]):
             DMSImporter: DMSImporter instance
         """

-        data_model_ids = [data_model_id
+        data_model_ids = [data_model_id]
         data_models = client.data_modeling.data_models.retrieve(data_model_ids, inline_views=True)

         user_models = cls._find_model_in_list(data_models, data_model_id)

@@ -128,34 +124,16 @@ class DMSImporter(BaseImporter[DMSInputRules]):
             )
         user_model = user_models.latest_version()

-        if reference_model_id:
-            ref_models = cls._find_model_in_list(data_models, reference_model_id)
-            if len(ref_models) == 0:
-                return cls(
-                    DMSSchema(),
-                    [
-                        ResourceRetrievalError(
-                            dm.DataModelId.load(reference_model_id),
-                            "data model",
-                            "Data Model is missing in CDF",
-                        )
-                    ],
-                )
-            ref_model: dm.DataModel[dm.View] | None = ref_models.latest_version()
-        else:
-            ref_model = None
-
         issue_list = IssueList()
         with _handle_issues(issue_list) as result:
-            schema =
+            schema = NeatClient(client).schema.retrieve_data_model(user_model)

         if result.result == "failure" or issue_list.has_errors:
             return cls(DMSSchema(), issue_list)

         metadata = cls._create_metadata_from_model(user_model)
-        ref_metadata = cls._create_metadata_from_model(ref_model) if ref_model else None

-        return cls(schema, issue_list, metadata,
+        return cls(schema, issue_list, metadata, None)

     @classmethod
     def _find_model_in_list(