cognite-neat 0.99.1__py3-none-any.whl → 0.100.0__py3-none-any.whl
This diff shows the changes between the publicly released versions 0.99.1 and 0.100.0 of cognite-neat as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release: this version of cognite-neat might be problematic.
- cognite/neat/_client/_api/data_modeling_loaders.py +383 -182
- cognite/neat/_client/data_classes/data_modeling.py +4 -0
- cognite/neat/_graph/extractors/_base.py +7 -0
- cognite/neat/_graph/extractors/_classic_cdf/_classic.py +23 -13
- cognite/neat/_graph/loaders/_rdf2dms.py +50 -11
- cognite/neat/_graph/transformers/__init__.py +3 -3
- cognite/neat/_graph/transformers/_classic_cdf.py +120 -52
- cognite/neat/_issues/warnings/__init__.py +2 -0
- cognite/neat/_issues/warnings/_resources.py +15 -0
- cognite/neat/_rules/analysis/_base.py +15 -5
- cognite/neat/_rules/analysis/_dms.py +20 -0
- cognite/neat/_rules/analysis/_information.py +22 -0
- cognite/neat/_rules/exporters/_base.py +3 -5
- cognite/neat/_rules/exporters/_rules2dms.py +190 -198
- cognite/neat/_rules/importers/_rdf/_inference2rules.py +22 -5
- cognite/neat/_rules/models/_base_rules.py +19 -0
- cognite/neat/_rules/models/_types.py +5 -0
- cognite/neat/_rules/models/dms/_exporter.py +215 -93
- cognite/neat/_rules/models/dms/_rules.py +4 -4
- cognite/neat/_rules/models/dms/_rules_input.py +8 -3
- cognite/neat/_rules/models/dms/_validation.py +42 -11
- cognite/neat/_rules/models/entities/_multi_value.py +3 -0
- cognite/neat/_rules/models/information/_rules.py +17 -2
- cognite/neat/_rules/models/information/_rules_input.py +11 -2
- cognite/neat/_rules/models/information/_validation.py +99 -3
- cognite/neat/_rules/models/mapping/_classic2core.yaml +1 -1
- cognite/neat/_rules/transformers/__init__.py +2 -1
- cognite/neat/_rules/transformers/_converters.py +163 -61
- cognite/neat/_rules/transformers/_mapping.py +132 -2
- cognite/neat/_session/_base.py +42 -31
- cognite/neat/_session/_mapping.py +105 -5
- cognite/neat/_session/_prepare.py +43 -9
- cognite/neat/_session/_read.py +50 -4
- cognite/neat/_session/_set.py +1 -0
- cognite/neat/_session/_to.py +34 -11
- cognite/neat/_session/_wizard.py +5 -0
- cognite/neat/_session/engine/_interface.py +3 -2
- cognite/neat/_store/_base.py +79 -19
- cognite/neat/_utils/collection_.py +22 -0
- cognite/neat/_utils/rdf_.py +24 -0
- cognite/neat/_version.py +2 -2
- cognite/neat/_workflows/steps/lib/current/rules_exporter.py +3 -3
- {cognite_neat-0.99.1.dist-info → cognite_neat-0.100.0.dist-info}/METADATA +1 -1
- {cognite_neat-0.99.1.dist-info → cognite_neat-0.100.0.dist-info}/RECORD +47 -47
- {cognite_neat-0.99.1.dist-info → cognite_neat-0.100.0.dist-info}/LICENSE +0 -0
- {cognite_neat-0.99.1.dist-info → cognite_neat-0.100.0.dist-info}/WHEEL +0 -0
- {cognite_neat-0.99.1.dist-info → cognite_neat-0.100.0.dist-info}/entry_points.txt +0 -0
The expanded diff below covers cognite/neat/_client/_api/data_modeling_loaders.py (+383 -182), the first file in the list, reconstructed as a unified diff:

--- a/cognite/neat/_client/_api/data_modeling_loaders.py
+++ b/cognite/neat/_client/_api/data_modeling_loaders.py
@@ -1,12 +1,12 @@
 import warnings
 from abc import ABC, abstractmethod
-from collections.abc import Sequence
+from collections.abc import Callable, Collection, Iterable, Sequence
+from dataclasses import dataclass, field
 from graphlib import TopologicalSorter
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, ClassVar, Generic, TypeVar, cast
 
 from cognite.client.data_classes import filters
 from cognite.client.data_classes._base import (
-    CogniteResourceList,
     T_CogniteResourceList,
     T_WritableCogniteResource,
     T_WriteClass,
@@ -23,6 +23,10 @@ from cognite.client.data_classes.data_modeling import (
     DataModelList,
     EdgeConnection,
     MappedProperty,
+    Node,
+    NodeApply,
+    NodeApplyList,
+    NodeList,
     RequiresConstraint,
     Space,
     SpaceApply,
@@ -48,6 +52,8 @@ from cognite.client.data_classes.data_modeling.views import (
 from cognite.client.exceptions import CogniteAPIError
 from cognite.client.utils.useful_types import SequenceNotStr
 
+from cognite.neat._client.data_classes.data_modeling import Component
+from cognite.neat._client.data_classes.schema import DMSSchema
 from cognite.neat._issues.warnings import CDFMaxIterationsWarning
 from cognite.neat._shared import T_ID
 
@@ -56,6 +62,16 @@ if TYPE_CHECKING:
 
 T_WritableCogniteResourceList = TypeVar("T_WritableCogniteResourceList", bound=WriteableCogniteResourceList)
 
+T_Item = TypeVar("T_Item")
+T_Out = TypeVar("T_Out", bound=Iterable)
+
+
+@dataclass
+class MultiCogniteAPIError(Exception, Generic[T_ID, T_WritableCogniteResourceList]):
+    success: T_WritableCogniteResourceList
+    failed: list[T_ID] = field(default_factory=list)
+    errors: list[CogniteAPIError] = field(default_factory=list)
+
 
 class ResourceLoader(
     ABC,
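The new MultiCogniteAPIError introduced above is a partial-failure carrier: success holds the resources that were applied, failed the ids that were not, and errors the underlying CogniteAPIError instances. A minimal consumption sketch (the loader and space_applies names are hypothetical, not part of the diff):

    try:
        created = loader.create(space_applies)
    except MultiCogniteAPIError as error:
        created = error.success  # partial result: the items that did get applied
        print(f"failed ids: {error.failed}")
        for api_error in error.errors:
            print(f"underlying error: {api_error}")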
@@ -68,6 +84,7 @@ class ResourceLoader(
     """
 
     resource_name: str
+    dependencies: "ClassVar[frozenset[type[ResourceLoader]]]" = frozenset()
 
     def __init__(self, client: "NeatClient") -> None:
         # This is exposed to allow for disabling the cache.
@@ -89,36 +106,79 @@ class ResourceLoader(
         return [cls.get_id(item) for item in items]
 
     def create(self, items: Sequence[T_WriteClass]) -> T_WritableCogniteResourceList:
-
+        # Containers can have dependencies on other containers, so we sort them before creating them.
+        items = self.sort_by_dependencies(items)
+
+        exception: MultiCogniteAPIError[T_ID, T_WritableCogniteResourceList] | None = None
+        try:
+            created = self._fallback_one_by_one(self._create, items)
+        except MultiCogniteAPIError as e:
+            created = e.success
+            exception = e
+
         if self.cache:
             self._items_by_id.update({self.get_id(item): item for item in created})
+
+        if exception is not None:
+            raise exception
+
         return created
 
     def retrieve(self, ids: SequenceNotStr[T_ID]) -> T_WritableCogniteResourceList:
         if not self.cache:
-
+            # We know that SequenceNotStr = Sequence
+            return self._fallback_one_by_one(self._retrieve, ids)  # type: ignore[arg-type]
+        exception: MultiCogniteAPIError[T_ID, T_WritableCogniteResourceList] | None = None
         missing_ids = [id for id in ids if id not in self._items_by_id.keys()]
         if missing_ids:
-
+            try:
+                retrieved = self._retrieve(missing_ids)
+            except MultiCogniteAPIError as e:
+                retrieved = e.success
+                exception = e
             self._items_by_id.update({self.get_id(item): item for item in retrieved})
+        if exception is not None:
+            raise exception
         # We need to check the cache again, in case we didn't retrieve all the items.
         return self._create_list([self._items_by_id[id] for id in ids if id in self._items_by_id])
 
-    def update(
-
-
-
-
+    def update(
+        self, items: Sequence[T_WriteClass], force: bool = False, drop_data: bool = False
+    ) -> T_WritableCogniteResourceList:
+        exception: MultiCogniteAPIError[T_ID, T_WritableCogniteResourceList] | None = None
+        if force:
+            updated = self._update_force(items, drop_data=drop_data)
+        else:
+            try:
+                updated = self._fallback_one_by_one(self._update, items)
+            except MultiCogniteAPIError as e:
+                updated = e.success
+                exception = e
+
+        if self.cache:
+            self._items_by_id.update({self.get_id(item): item for item in updated})
+
+        if exception is not None:
+            raise exception
+
         return updated
 
     def delete(self, ids: SequenceNotStr[T_ID] | Sequence[T_WriteClass]) -> list[T_ID]:
         id_list = [self.get_id(item) for item in ids]
-
-
-
-
-
-
+        exception: MultiCogniteAPIError[T_ID, T_WritableCogniteResourceList] | None = None
+        try:
+            # We know that SequenceNotStr = Sequence
+            deleted = self._fallback_one_by_one(self._delete, id_list)  # type: ignore[arg-type]
+        except MultiCogniteAPIError as e:
+            deleted = e.success
+            exception = e
+
+        if self.cache:
+            for id in deleted:
+                self._items_by_id.pop(id, None)
+        if exception is not None:
+            raise exception
+
         return deleted
 
     @abstractmethod
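update() now accepts force and drop_data: on a failed in-place update it can fall back to delete-and-recreate via _update_force, but it skips any resource that still holds data unless drop_data=True. A hedged call sketch (loader and containers are hypothetical names):

    # Redeploy, allowing delete-and-recreate, but never dropping populated containers.
    updated = loader.update(containers, force=True, drop_data=False)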
@@ -141,9 +201,77 @@ class ResourceLoader(
     def _create_list(self, items: Sequence[T_WritableCogniteResource]) -> T_WritableCogniteResourceList:
         raise NotImplementedError
 
+    def has_data(self, item_id: T_ID) -> bool:
+        return False
+
     def are_equal(self, local: T_WriteClass, remote: T_WritableCogniteResource) -> bool:
         return local == remote.as_write()
 
+    def sort_by_dependencies(self, items: Sequence[T_WriteClass]) -> list[T_WriteClass]:
+        return list(items)
+
+    def _update_force(
+        self,
+        items: Sequence[T_WriteClass],
+        drop_data: bool = False,
+        tried_force_update: set[T_ID] | None = None,
+        success: T_WritableCogniteResourceList | None = None,
+    ) -> T_WritableCogniteResourceList:
+        tried_force_update = tried_force_update or set()
+        try:
+            return self._update(items)
+        except CogniteAPIError as e:
+            failed_ids = {self.get_id(failed) for failed in e.failed + e.unknown}
+            success_ids = [self.get_id(success) for success in e.successful]
+            success_ = self.retrieve(success_ids)
+            if success is None:
+                success = success_
+            else:
+                success.extend(success_)
+            to_redeploy: list[T_WriteClass] = []
+            for item in items:
+                item_id = self.get_id(item)
+                if item_id in failed_ids:
+                    if tried_force_update and item_id in tried_force_update:
+                        # Avoid infinite loop
+                        continue
+                    tried_force_update.add(item_id)
+                    if self.has_data(item_id) and not drop_data:
+                        continue
+                    to_redeploy.append(item)
+            if not to_redeploy:
+                # Avoid infinite loop
+                raise e
+            self.delete(to_redeploy)
+            forced = self._update_force(to_redeploy, drop_data, tried_force_update, success)
+            forced.extend(success)
+            return forced
+
+    def _fallback_one_by_one(self, method: Callable[[Sequence[T_Item]], T_Out], items: Sequence[T_Item]) -> T_Out:
+        try:
+            return method(items)
+        except CogniteAPIError as e:
+            exception = MultiCogniteAPIError[T_ID, T_WritableCogniteResourceList](self._create_list([]))
+            success = {self.get_id(success) for success in e.successful}
+            if success:
+                # Need read version of the items to put into cache.
+                retrieve_items = self.retrieve(list(success))
+                exception.success.extend(retrieve_items)
+            for item in items:
+                # We know that item is either T_ID or T_WriteClass
+                # but the T_Item cannot be bound to both types at the same time.
+                item_id = self.get_id(item)  # type: ignore[arg-type]
+                if item_id in success:
+                    continue
+                try:
+                    item_result = method([item])
+                except CogniteAPIError as item_exception:
+                    exception.errors.append(item_exception)
+                    exception.failed.extend(self.get_ids(item_exception.failed))
+                else:
+                    exception.success.extend(item_result)
+            raise exception from None
+
 
 class DataModelingLoader(
     ResourceLoader[T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList],
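_fallback_one_by_one implements a common bulk-write pattern: attempt the whole batch in one call and, only when that fails, replay the items one at a time so a single bad item cannot sink the batch, collecting the per-item errors along the way. A standalone sketch of the same pattern, deliberately independent of the loader classes and the Cognite SDK:

    from collections.abc import Callable, Sequence
    from typing import TypeVar

    T = TypeVar("T")

    def apply_with_fallback(
        method: Callable[[Sequence[T]], list[T]], items: Sequence[T]
    ) -> tuple[list[T], list[Exception]]:
        """Try the batch in one call; on failure, retry item by item."""
        try:
            return method(items), []
        except Exception:
            results: list[T] = []
            errors: list[Exception] = []
            for item in items:
                try:
                    results.extend(method([item]))
                except Exception as error:
                    errors.append(error)
            return results, errors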
@@ -155,40 +283,10 @@ class DataModelingLoader(
             return item.space in space
         raise ValueError(f"Item {item} does not have a space attribute")
 
-
-
-
-
-        self, items: Sequence[T_WriteClass], existing_handling: Literal["fail", "skip", "update", "force"] = "fail"
-    ) -> T_WritableCogniteResourceList:
-        if existing_handling != "force":
-            return super().create(items)
-
-        created = self._create_force(items, set())
-        if self.cache:
-            self._items_by_id.update({self.get_id(item): item for item in created})
-        return created
-
-    def _create_force(
-        self,
-        items: Sequence[T_WriteClass],
-        tried_force_deploy: set[T_ID],
-    ) -> T_WritableCogniteResourceList:
-        try:
-            return self._create(items)
-        except CogniteAPIError as e:
-            failed_ids = {self.get_id(failed) for failed in e.failed}
-            to_redeploy = [
-                item
-                for item in items
-                if self.get_id(item) in failed_ids and self.get_id(item) not in tried_force_deploy
-            ]
-            if not to_redeploy:
-                # Avoid infinite loop
-                raise e
-            tried_force_deploy.update([self.get_id(item) for item in to_redeploy])
-            self.delete(to_redeploy)
-            return self._create_force(to_redeploy, tried_force_deploy)
+    @classmethod
+    @abstractmethod
+    def items_from_schema(cls, schema: DMSSchema) -> T_CogniteResourceList:
+        raise NotImplementedError
 
 
 class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, SpaceList]):
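With items_from_schema on every loader, a deployment routine can pull each resource type out of a DMSSchema generically instead of special-casing spaces, containers, views, data models, and nodes. A sketch of the resulting loop (the deploy function itself is an assumption about how callers combine these pieces, not code from the diff):

    def deploy(schema: DMSSchema, loaders: list[DataModelingLoader]) -> None:
        # Loaders are expected in dependency order, e.g. from by_dependency_order().
        for loader in loaders:
            items = loader.items_from_schema(schema)
            if items:
                loader.create(items)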
@@ -260,9 +358,147 @@ class SpaceLoader(DataModelingLoader[str, SpaceApply, Space, SpaceApplyList, SpaceList]):
         deleted_space = self._client.data_modeling.spaces.delete(space)
         print(f"Deleted space {deleted_space}")
 
+    @classmethod
+    def items_from_schema(cls, schema: DMSSchema) -> SpaceApplyList:
+        return SpaceApplyList(schema.spaces.values())
+
+    def has_data(self, item_id: str) -> bool:
+        return bool(self._client.data_modeling.instances.list("node", limit=1, space=item_id)) or bool(
+            self._client.data_modeling.instances.list("edge", limit=1, space=item_id)
+        )
+
+
+class ContainerLoader(DataModelingLoader[ContainerId, ContainerApply, Container, ContainerApplyList, ContainerList]):
+    resource_name = "containers"
+    dependencies = frozenset({SpaceLoader})
+
+    @classmethod
+    def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId:
+        if isinstance(item, Container | ContainerApply):
+            return item.as_id()
+        if isinstance(item, dict):
+            return ContainerId.load(item)
+        return item
+
+    def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]:
+        container_by_id = {container.as_id(): container for container in items}
+        container_dependencies = {
+            container.as_id(): {
+                const.require
+                for const in container.constraints.values()
+                if isinstance(const, RequiresConstraint) and const.require in container_by_id
+            }
+            for container in items
+        }
+        return [
+            container_by_id[container_id] for container_id in TopologicalSorter(container_dependencies).static_order()
+        ]
+
+    def _create(self, items: Sequence[ContainerApply]) -> ContainerList:
+        return self._client.data_modeling.containers.apply(items)
+
+    def retrieve(self, ids: SequenceNotStr[ContainerId], include_connected: bool = False) -> ContainerList:
+        if not include_connected:
+            return super().retrieve(ids)
+        # Retrieve recursively updates the cache.
+        return self._retrieve_recursive(ids)
+
+    def _retrieve(self, ids: SequenceNotStr[ContainerId]) -> ContainerList:
+        return self._client.data_modeling.containers.retrieve(cast(Sequence, ids))
+
+    def _update(self, items: Sequence[ContainerApply]) -> ContainerList:
+        return self._create(items)
+
+    def _delete(self, ids: SequenceNotStr[ContainerId]) -> list[ContainerId]:
+        return self._client.data_modeling.containers.delete(cast(Sequence, ids))
+
+    def _create_list(self, items: Sequence[Container]) -> ContainerList:
+        return ContainerList(items)
+
+    def _retrieve_recursive(self, container_ids: SequenceNotStr[ContainerId]) -> ContainerList:
+        """Containers can reference each other through the 'requires' constraint.
+
+        This method retrieves all containers that are referenced by other containers through the 'requires' constraint,
+        including their parents.
+        """
+        max_iterations = 10  # Limiting the number of iterations to avoid infinite loops
+        found = ContainerList([])
+        found_ids: set[ContainerId] = set()
+        last_batch = list(container_ids)
+        for _ in range(max_iterations):
+            if not last_batch:
+                break
+            to_retrieve_from_cdf: set[ContainerId] = set()
+            batch_ids: list[ContainerId] = []
+            for container_id in last_batch:
+                if container_id in found_ids:
+                    continue
+                elif container_id in self._items_by_id:
+                    container = self._items_by_id[container_id]
+                    found.append(container)
+                    batch_ids.extend(self.get_connected_containers(container, found_ids))
+                else:
+                    to_retrieve_from_cdf.add(container_id)
+
+            if to_retrieve_from_cdf:
+                retrieved_batch = self._client.data_modeling.containers.retrieve(list(to_retrieve_from_cdf))
+                self._items_by_id.update({view.as_id(): view for view in retrieved_batch})
+                found.extend(retrieved_batch)
+                found_ids.update({view.as_id() for view in retrieved_batch})
+                for container in retrieved_batch:
+                    batch_ids.extend(self.get_connected_containers(container, found_ids))
+
+            last_batch = batch_ids
+        else:
+            warnings.warn(
+                CDFMaxIterationsWarning(
+                    "The maximum number of iterations was reached while resolving referenced containers."
+                    "There might be referenced containers that are not included in the list of containers.",
+                    max_iterations=max_iterations,
+                ),
+                stacklevel=2,
+            )
+
+        if self.cache is False:
+            # We must update the cache to retrieve recursively.
+            # If the cache is disabled, bust the cache to avoid storing the retrieved views.
+            self.bust_cache()
+        return found
+
+    @staticmethod
+    def get_connected_containers(
+        container: Container | ContainerApply, skip: set[ContainerId] | None = None
+    ) -> set[ContainerId]:
+        connected_containers = set()
+        for constraint in container.constraints.values():
+            if isinstance(constraint, RequiresConstraint):
+                connected_containers.add(constraint.require)
+        if skip:
+            return {container_id for container_id in connected_containers if container_id not in skip}
+        return connected_containers
+
+    def are_equal(self, local: ContainerApply, remote: Container) -> bool:
+        local_dumped = local.dump(camel_case=True)
+        if "usedFor" not in local_dumped:
+            # Setting used_for to "node" as it is the default value in the CDF.
+            local_dumped["usedFor"] = "node"
+
+        return local_dumped == remote.as_write().dump(camel_case=True)
+
+    @classmethod
+    def items_from_schema(cls, schema: DMSSchema) -> ContainerApplyList:
+        return ContainerApplyList(schema.containers.values())
+
+    def has_data(self, item_id: ContainerId) -> bool:
+        has_data = filters.HasData(containers=[item_id])
+        return bool(self._client.data_modeling.instances.list("node", limit=1, filter=has_data)) or bool(
+            self._client.data_modeling.instances.list("edge", limit=1, filter=has_data)
+        )
+
 
 class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
     resource_name = "views"
+    dependencies = frozenset({SpaceLoader, ContainerLoader})
 
     @classmethod
     def get_id(cls, item: View | ViewApply | ViewId | dict) -> ViewId:
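ContainerLoader.sort_by_dependencies builds a 'requires' graph and hands it to graphlib.TopologicalSorter, whose static_order() yields every node after all of its dependencies. A small self-contained example with made-up container names:

    from graphlib import TopologicalSorter

    # Each key depends on the names in its set (hypothetical 'requires' constraints).
    requires = {
        "Asset": set(),
        "Pump": {"Asset"},
        "Valve": {"Asset"},
        "PumpStation": {"Pump", "Valve"},
    }
    print(list(TopologicalSorter(requires).static_order()))
    # One valid order: ['Asset', 'Pump', 'Valve', 'PumpStation']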
@@ -341,19 +577,19 @@ class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
             include_connections: Whether to include all connected views.
             include_ancestors: Whether to include all ancestors.
         """
-        last_batch =
+        last_batch = set(view_ids)
         found = ViewList([])
         found_ids: set[ViewId] = set()
         while last_batch:
             to_retrieve_from_cdf: set[ViewId] = set()
-            batch_ids:
+            batch_ids: set[ViewId] = set()
             for view_id in last_batch:
                 if view_id in found_ids:
                     continue
                 elif view_id in self._items_by_id:
                     view = self._items_by_id[view_id]
                     found.append(view)
-                    batch_ids.
+                    batch_ids.update(self.get_connected_views(view, include_ancestors, include_connections, found_ids))
                 else:
                     to_retrieve_from_cdf.add(view_id)
 
@@ -363,7 +599,7 @@ class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
                 found.extend(retrieved_batch)
                 found_ids.update({view.as_id() for view in retrieved_batch})
                 for view in retrieved_batch:
-                    batch_ids.
+                    batch_ids.update(self.get_connected_views(view, include_ancestors, include_connections, found_ids))
 
             last_batch = batch_ids
 
@@ -403,126 +639,14 @@ class ViewLoader(DataModelingLoader[ViewId, ViewApply, View, ViewApplyList, ViewList]):
     def _create_list(self, items: Sequence[View]) -> ViewList:
         return ViewList(items)
 
-
-class ContainerLoader(DataModelingLoader[ContainerId, ContainerApply, Container, ContainerApplyList, ContainerList]):
-    resource_name = "containers"
-
     @classmethod
-    def get_id(cls, item: Container | ContainerApply | ContainerId | dict) -> ContainerId:
-        if isinstance(item, Container | ContainerApply):
-            return item.as_id()
-        if isinstance(item, dict):
-            return ContainerId.load(item)
-        return item
-
-    def sort_by_dependencies(self, items: Sequence[ContainerApply]) -> list[ContainerApply]:
-        container_by_id = {container.as_id(): container for container in items}
-        container_dependencies = {
-            container.as_id(): {
-                const.require
-                for const in container.constraints.values()
-                if isinstance(const, RequiresConstraint) and const.require in container_by_id
-            }
-            for container in items
-        }
-        return [
-            container_by_id[container_id] for container_id in TopologicalSorter(container_dependencies).static_order()
-        ]
-
-    def _create(self, items: Sequence[ContainerApply]) -> ContainerList:
-        return self._client.data_modeling.containers.apply(items)
-
-    def retrieve(self, ids: SequenceNotStr[ContainerId], include_connected: bool = False) -> ContainerList:
-        if not include_connected:
-            return super().retrieve(ids)
-        # Retrieve recursively updates the cache.
-        return self._retrieve_recursive(ids)
-
-    def _retrieve(self, ids: SequenceNotStr[ContainerId]) -> ContainerList:
-        return self._client.data_modeling.containers.retrieve(cast(Sequence, ids))
-
-    def _update(self, items: Sequence[ContainerApply]) -> ContainerList:
-        return self._create(items)
-
-    def _delete(self, ids: SequenceNotStr[ContainerId]) -> list[ContainerId]:
-        return self._client.data_modeling.containers.delete(cast(Sequence, ids))
-
-    def _create_list(self, items: Sequence[Container]) -> ContainerList:
-        return ContainerList(items)
-
-    def _retrieve_recursive(self, container_ids: SequenceNotStr[ContainerId]) -> ContainerList:
-        """Containers can reference each other through the 'requires' constraint.
-
-        This method retrieves all containers that are referenced by other containers through the 'requires' constraint,
-        including their parents.
-        """
-        max_iterations = 10  # Limiting the number of iterations to avoid infinite loops
-        found = ContainerList([])
-        found_ids: set[ContainerId] = set()
-        last_batch = list(container_ids)
-        for _ in range(max_iterations):
-            if not last_batch:
-                break
-            to_retrieve_from_cdf: set[ContainerId] = set()
-            batch_ids: list[ContainerId] = []
-            for container_id in last_batch:
-                if container_id in found_ids:
-                    continue
-                elif container_id in self._items_by_id:
-                    container = self._items_by_id[container_id]
-                    found.append(container)
-                    batch_ids.extend(self.get_connected_containers(container, found_ids))
-                else:
-                    to_retrieve_from_cdf.add(container_id)
-
-            if to_retrieve_from_cdf:
-                retrieved_batch = self._client.data_modeling.containers.retrieve(list(to_retrieve_from_cdf))
-                self._items_by_id.update({view.as_id(): view for view in retrieved_batch})
-                found.extend(retrieved_batch)
-                found_ids.update({view.as_id() for view in retrieved_batch})
-                for container in retrieved_batch:
-                    batch_ids.extend(self.get_connected_containers(container, found_ids))
-
-            last_batch = batch_ids
-        else:
-            warnings.warn(
-                CDFMaxIterationsWarning(
-                    "The maximum number of iterations was reached while resolving referenced containers."
-                    "There might be referenced containers that are not included in the list of containers.",
-                    max_iterations=max_iterations,
-                ),
-                stacklevel=2,
-            )
-
-        if self.cache is False:
-            # We must update the cache to retrieve recursively.
-            # If the cache is disabled, bust the cache to avoid storing the retrieved views.
-            self.bust_cache()
-        return found
-
-    @staticmethod
-    def get_connected_containers(
-        container: Container | ContainerApply, skip: set[ContainerId] | None = None
-    ) -> set[ContainerId]:
-        connected_containers = set()
-        for constraint in container.constraints.values():
-            if isinstance(constraint, RequiresConstraint):
-                connected_containers.add(constraint.require)
-        if skip:
-            return {container_id for container_id in connected_containers if container_id not in skip}
-        return connected_containers
-
-    def are_equal(self, local: ContainerApply, remote: Container) -> bool:
-        local_dumped = local.dump(camel_case=True)
-        if "usedFor" not in local_dumped:
-            # Setting used_for to "node" as it is the default value in the CDF.
-            local_dumped["usedFor"] = "node"
-
-        return local_dumped == remote.as_write().dump(camel_case=True)
+    def items_from_schema(cls, schema: DMSSchema) -> ViewApplyList:
+        return ViewApplyList(schema.views.values())
 
 
 class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel, DataModelApplyList, DataModelList]):
     resource_name = "data_models"
+    dependencies = frozenset({SpaceLoader, ViewLoader})
 
     @classmethod
     def get_id(cls, item: DataModel | DataModelApply | DataModelId | dict) -> DataModelId:
@@ -562,6 +686,72 @@ class DataModelLoader(DataModelingLoader[DataModelId, DataModelApply, DataModel, DataModelApplyList, DataModelList]):
 
         return local_dumped == cdf_resource_dumped
 
+    @classmethod
+    def items_from_schema(cls, schema: DMSSchema) -> DataModelApplyList:
+        return DataModelApplyList([schema.data_model])
+
+
+class NodeLoader(DataModelingLoader[NodeId, NodeApply, Node, NodeApplyList, NodeList]):
+    resource_name = "nodes"
+    dependencies = frozenset({SpaceLoader, ContainerLoader, ViewLoader})
+
+    @classmethod
+    def get_id(cls, item: Node | NodeApply | NodeId | dict) -> NodeId:
+        if isinstance(item, Node | NodeApply):
+            return item.as_id()
+        if isinstance(item, dict):
+            return NodeId.load(item)
+        return item
+
+    def _create(self, items: Sequence[NodeApply]) -> NodeList:
+        self._client.data_modeling.instances.apply(items)
+        return self._retrieve([item.as_id() for item in items])
+
+    def _retrieve(self, ids: SequenceNotStr[NodeId]) -> NodeList:
+        return self._client.data_modeling.instances.retrieve(cast(Sequence, ids)).nodes
+
+    def _update(self, items: Sequence[NodeApply]) -> NodeList:
+        self._client.data_modeling.instances.apply(items, replace=True)
+        return self._retrieve([item.as_id() for item in items])
+
+    def _delete(self, ids: SequenceNotStr[NodeId]) -> list[NodeId]:
+        return list(self._client.data_modeling.instances.delete(nodes=cast(Sequence, ids)).nodes)
+
+    def _create_list(self, items: Sequence[Node]) -> NodeList:
+        return NodeList(items)
+
+    def are_equal(self, local: NodeApply, remote: Node) -> bool:
+        local_dumped = local.dump()
+
+        # Note reading from a container is not supported.
+        sources = [
+            source_prop_pair.source
+            for source_prop_pair in local.sources or []
+            if isinstance(source_prop_pair.source, ViewId)
+        ]
+        if sources:
+            try:
+                cdf_resource_with_properties = self._client.data_modeling.instances.retrieve(
+                    nodes=remote.as_id(), sources=sources
+                ).nodes[0]
+            except CogniteAPIError:
+                # View does not exist, so node does not exist.
+                return False
+        else:
+            cdf_resource_with_properties = remote
+        cdf_resource_dumped = cdf_resource_with_properties.as_write().dump()
+
+        if "existingVersion" not in local_dumped:
+            # Existing version is typically not set when creating nodes, but we get it back
+            # when we retrieve the node from the server.
+            local_dumped["existingVersion"] = cdf_resource_dumped.get("existingVersion", None)
+
+        return local_dumped == cdf_resource_dumped
+
+    @classmethod
+    def items_from_schema(cls, schema: DMSSchema) -> NodeApplyList:
+        return NodeApplyList(schema.node_types.values())
+
 
 class DataModelLoaderAPI:
     def __init__(self, client: "NeatClient") -> None:
@@ -570,16 +760,27 @@ class DataModelLoaderAPI:
         self.views = ViewLoader(client)
         self.containers = ContainerLoader(client)
         self.data_models = DataModelLoader(client)
+        self.nodes = NodeLoader(client)
+        self._loaders: list[DataModelingLoader] = [
+            self.spaces,
+            self.views,
+            self.containers,
+            self.data_models,
+            self.nodes,
+        ]
 
-    def
-
-
-
-
-
-
-
-
-
-
+    def by_dependency_order(
+        self, component: Component | Collection[Component] | None = None
+    ) -> list[DataModelingLoader]:
+        loader_by_type = {type(loader): loader for loader in self._loaders}
+        loader_iterable = (
+            loader_by_type[loader_cls]  # type: ignore[index]
+            for loader_cls in TopologicalSorter(
+                {type(loader): loader.dependencies for loader in self._loaders}  # type: ignore[attr-defined]
+            ).static_order()
+        )
+        if component is None:
+            return list(loader_iterable)
+        components = {component} if isinstance(component, str) else set(component)
+        components = {{"node_type": "nodes"}.get(component, component) for component in components}
+        return [loader for loader in loader_iterable if loader.resource_name in components]