cognite-neat 0.99.0__py3-none-any.whl → 0.100.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. Click here for more details.

Files changed (84) hide show
  1. cognite/neat/_client/_api/data_modeling_loaders.py +390 -116
  2. cognite/neat/_client/_api/schema.py +63 -2
  3. cognite/neat/_client/data_classes/data_modeling.py +4 -0
  4. cognite/neat/_client/data_classes/schema.py +2 -348
  5. cognite/neat/_constants.py +27 -4
  6. cognite/neat/_graph/extractors/_base.py +7 -0
  7. cognite/neat/_graph/extractors/_classic_cdf/_classic.py +28 -18
  8. cognite/neat/_graph/loaders/_rdf2dms.py +52 -13
  9. cognite/neat/_graph/transformers/__init__.py +3 -3
  10. cognite/neat/_graph/transformers/_classic_cdf.py +135 -56
  11. cognite/neat/_issues/_base.py +26 -17
  12. cognite/neat/_issues/errors/__init__.py +4 -2
  13. cognite/neat/_issues/errors/_external.py +7 -0
  14. cognite/neat/_issues/errors/_properties.py +2 -7
  15. cognite/neat/_issues/errors/_resources.py +1 -1
  16. cognite/neat/_issues/warnings/__init__.py +6 -2
  17. cognite/neat/_issues/warnings/_external.py +9 -1
  18. cognite/neat/_issues/warnings/_resources.py +41 -2
  19. cognite/neat/_issues/warnings/user_modeling.py +4 -4
  20. cognite/neat/_rules/_constants.py +2 -6
  21. cognite/neat/_rules/analysis/_base.py +15 -5
  22. cognite/neat/_rules/analysis/_dms.py +20 -0
  23. cognite/neat/_rules/analysis/_information.py +22 -0
  24. cognite/neat/_rules/exporters/_base.py +3 -5
  25. cognite/neat/_rules/exporters/_rules2dms.py +190 -200
  26. cognite/neat/_rules/importers/__init__.py +1 -3
  27. cognite/neat/_rules/importers/_base.py +1 -1
  28. cognite/neat/_rules/importers/_dms2rules.py +3 -25
  29. cognite/neat/_rules/importers/_rdf/__init__.py +5 -0
  30. cognite/neat/_rules/importers/_rdf/_base.py +34 -11
  31. cognite/neat/_rules/importers/_rdf/_imf2rules.py +91 -0
  32. cognite/neat/_rules/importers/_rdf/_inference2rules.py +40 -7
  33. cognite/neat/_rules/importers/_rdf/_owl2rules.py +80 -0
  34. cognite/neat/_rules/importers/_rdf/_shared.py +138 -441
  35. cognite/neat/_rules/models/_base_rules.py +19 -0
  36. cognite/neat/_rules/models/_types.py +5 -0
  37. cognite/neat/_rules/models/dms/__init__.py +2 -0
  38. cognite/neat/_rules/models/dms/_exporter.py +247 -123
  39. cognite/neat/_rules/models/dms/_rules.py +7 -49
  40. cognite/neat/_rules/models/dms/_rules_input.py +8 -3
  41. cognite/neat/_rules/models/dms/_validation.py +421 -123
  42. cognite/neat/_rules/models/entities/_multi_value.py +3 -0
  43. cognite/neat/_rules/models/information/__init__.py +2 -0
  44. cognite/neat/_rules/models/information/_rules.py +17 -61
  45. cognite/neat/_rules/models/information/_rules_input.py +11 -2
  46. cognite/neat/_rules/models/information/_validation.py +107 -11
  47. cognite/neat/_rules/models/mapping/_classic2core.py +1 -1
  48. cognite/neat/_rules/models/mapping/_classic2core.yaml +8 -4
  49. cognite/neat/_rules/transformers/__init__.py +2 -1
  50. cognite/neat/_rules/transformers/_converters.py +163 -61
  51. cognite/neat/_rules/transformers/_mapping.py +132 -2
  52. cognite/neat/_rules/transformers/_pipelines.py +1 -1
  53. cognite/neat/_rules/transformers/_verification.py +29 -4
  54. cognite/neat/_session/_base.py +46 -60
  55. cognite/neat/_session/_mapping.py +105 -5
  56. cognite/neat/_session/_prepare.py +49 -14
  57. cognite/neat/_session/_read.py +50 -4
  58. cognite/neat/_session/_set.py +1 -0
  59. cognite/neat/_session/_to.py +38 -12
  60. cognite/neat/_session/_wizard.py +5 -0
  61. cognite/neat/_session/engine/_interface.py +3 -2
  62. cognite/neat/_session/exceptions.py +4 -0
  63. cognite/neat/_store/_base.py +79 -19
  64. cognite/neat/_utils/collection_.py +22 -0
  65. cognite/neat/_utils/rdf_.py +30 -4
  66. cognite/neat/_version.py +2 -2
  67. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +3 -91
  68. cognite/neat/_workflows/steps/lib/current/rules_importer.py +2 -16
  69. cognite/neat/_workflows/steps/lib/current/rules_validator.py +3 -5
  70. {cognite_neat-0.99.0.dist-info → cognite_neat-0.100.0.dist-info}/METADATA +1 -1
  71. {cognite_neat-0.99.0.dist-info → cognite_neat-0.100.0.dist-info}/RECORD +74 -82
  72. cognite/neat/_rules/importers/_rdf/_imf2rules/__init__.py +0 -3
  73. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2classes.py +0 -86
  74. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2metadata.py +0 -29
  75. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2properties.py +0 -130
  76. cognite/neat/_rules/importers/_rdf/_imf2rules/_imf2rules.py +0 -154
  77. cognite/neat/_rules/importers/_rdf/_owl2rules/__init__.py +0 -3
  78. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2classes.py +0 -58
  79. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2metadata.py +0 -65
  80. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2properties.py +0 -59
  81. cognite/neat/_rules/importers/_rdf/_owl2rules/_owl2rules.py +0 -39
  82. {cognite_neat-0.99.0.dist-info → cognite_neat-0.100.0.dist-info}/LICENSE +0 -0
  83. {cognite_neat-0.99.0.dist-info → cognite_neat-0.100.0.dist-info}/WHEEL +0 -0
  84. {cognite_neat-0.99.0.dist-info → cognite_neat-0.100.0.dist-info}/entry_points.txt +0 -0
@@ -1,20 +1,24 @@
1
1
  import warnings
2
- from collections.abc import Collection, Hashable, Iterable, Sequence
2
+ from collections.abc import Callable, Collection, Hashable, Iterable
3
+ from dataclasses import dataclass, field
3
4
  from pathlib import Path
4
- from typing import Literal, TypeAlias, cast
5
+ from typing import Generic, Literal
5
6
 
6
- from cognite.client.data_classes._base import CogniteResource, CogniteResourceList
7
+ from cognite.client.data_classes._base import (
8
+ T_CogniteResourceList,
9
+ T_WritableCogniteResource,
10
+ T_WriteClass,
11
+ )
7
12
  from cognite.client.data_classes.data_modeling import (
8
- ContainerApplyList,
9
- DataModelApply,
10
13
  DataModelApplyList,
11
14
  DataModelId,
12
- SpaceApplyList,
13
15
  ViewApplyList,
14
16
  )
15
17
  from cognite.client.exceptions import CogniteAPIError
16
18
 
17
19
  from cognite.neat._client import DataModelingLoader, NeatClient
20
+ from cognite.neat._client._api.data_modeling_loaders import MultiCogniteAPIError, T_WritableCogniteResourceList
21
+ from cognite.neat._client.data_classes.data_modeling import Component
18
22
  from cognite.neat._client.data_classes.schema import DMSSchema
19
23
  from cognite.neat._issues import IssueList
20
24
  from cognite.neat._issues.warnings import (
@@ -22,11 +26,44 @@ from cognite.neat._issues.warnings import (
22
26
  ResourceRetrievalWarning,
23
27
  )
24
28
  from cognite.neat._rules.models.dms import DMSRules
29
+ from cognite.neat._shared import T_ID
25
30
  from cognite.neat._utils.upload import UploadResult
26
31
 
27
32
  from ._base import CDFExporter
28
33
 
29
- Component: TypeAlias = Literal["all", "spaces", "data_models", "views", "containers", "node_types"]
34
+
35
+ @dataclass
36
+ class ItemCategorized(Generic[T_ID, T_WriteClass]):
37
+ resource_name: str
38
+ as_id: Callable[[T_WriteClass], T_ID]
39
+ to_create: list[T_WriteClass] = field(default_factory=list)
40
+ to_update: list[T_WriteClass] = field(default_factory=list)
41
+ to_delete: list[T_WriteClass] = field(default_factory=list)
42
+ to_skip: list[T_WriteClass] = field(default_factory=list)
43
+ unchanged: list[T_WriteClass] = field(default_factory=list)
44
+
45
+ @property
46
+ def to_create_ids(self) -> list[T_ID]:
47
+ return [self.as_id(item) for item in self.to_create]
48
+
49
+ @property
50
+ def to_update_ids(self) -> list[T_ID]:
51
+ return [self.as_id(item) for item in self.to_update]
52
+
53
+ @property
54
+ def to_skip_ids(self) -> list[T_ID]:
55
+ return [self.as_id(item) for item in self.to_skip]
56
+
57
+ @property
58
+ def to_delete_ids(self) -> list[T_ID]:
59
+ return [self.as_id(item) for item in self.to_delete]
60
+
61
+ @property
62
+ def unchanged_ids(self) -> list[T_ID]:
63
+ return [self.as_id(item) for item in self.unchanged]
64
+
65
+ def item_ids(self) -> Iterable[T_ID]:
66
+ yield from (self.as_id(item) for item in self.to_create + self.to_update + self.to_delete + self.unchanged)
30
67
 
31
68
 
32
69
  class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
@@ -37,38 +74,39 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
37
74
  Which components to export. Defaults to frozenset({"all"}).
38
75
  include_space (set[str], optional):
39
76
  If set, only export components in the given spaces. Defaults to None which means all spaces.
40
- existing_handling (Literal["fail", "skip", "update", "force"], optional): How to handle existing components.
77
+ existing (Literal["fail", "skip", "update", "force"], optional): How to handle existing components.
41
78
  Defaults to "update". See below for details.
42
- export_pipeline (bool, optional): Whether to export the pipeline. Defaults to False. This means setting
43
- up transformations, RAW databases and tables to populate the data model.
44
79
  instance_space (str, optional): The space to use for the instance. Defaults to None.
45
80
  suppress_warnings (bool, optional): Suppress warnings. Defaults to False.
81
+ remove_cdf_spaces (bool, optional): Skip views and containers that are in system spaces.
46
82
 
47
83
  ... note::
48
84
 
49
85
  - "fail": If any component already exists, the export will fail.
50
86
  - "skip": If any component already exists, it will be skipped.
51
- - "update": If any component already exists, it will be updated.
87
+ - "update": If any component already exists, it will be updated.
52
88
  - "force": If any component already exists, it will be deleted and recreated.
53
89
 
54
90
  """
55
91
 
56
92
  def __init__(
57
93
  self,
58
- export_components: Component | Collection[Component] = "all",
94
+ export_components: Component | Collection[Component] | None = None,
59
95
  include_space: set[str] | None = None,
60
- existing_handling: Literal["fail", "skip", "update", "force"] = "update",
61
- export_pipeline: bool = False,
96
+ existing: Literal["fail", "skip", "update", "force", "recreate"] = "update",
62
97
  instance_space: str | None = None,
63
98
  suppress_warnings: bool = False,
99
+ drop_data: bool = False,
100
+ remove_cdf_spaces: bool = True,
64
101
  ):
65
- self.export_components = {export_components} if isinstance(export_components, str) else set(export_components)
102
+ self.export_components = export_components
66
103
  self.include_space = include_space
67
- self.existing_handling = existing_handling
68
- self.export_pipeline = export_pipeline
104
+ self.existing = existing
105
+ self.drop_data = drop_data
69
106
  self.instance_space = instance_space
70
107
  self.suppress_warnings = suppress_warnings
71
108
  self._schema: DMSSchema | None = None
109
+ self.remove_cdf_spaces = remove_cdf_spaces
72
110
 
73
111
  def export_to_file(self, rules: DMSRules, filepath: Path) -> None:
74
112
  """Export the rules to a file(s).
@@ -99,28 +137,28 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
99
137
  schema.to_zip(filepath, exclude=exclude)
100
138
 
101
139
  def _create_exclude_set(self):
102
- if "all" in self.export_components:
140
+ if self.export_components is None:
103
141
  exclude = set()
104
142
  else:
105
- exclude = {"spaces", "data_models", "views", "containers", "node_types"} - self.export_components
143
+ exclude = {"spaces", "data_models", "views", "containers", "node_types"} - set(self.export_components)
106
144
  return exclude
107
145
 
108
146
  def export(self, rules: DMSRules) -> DMSSchema:
109
- return rules.as_schema(include_pipeline=self.export_pipeline, instance_space=self.instance_space)
147
+ # We do not want to include CogniteCore/CogniteProcess Industries in the schema
148
+ return rules.as_schema(instance_space=self.instance_space, remove_cdf_spaces=self.remove_cdf_spaces)
110
149
 
111
150
  def delete_from_cdf(
112
151
  self, rules: DMSRules, client: NeatClient, dry_run: bool = False, skip_space: bool = False
113
152
  ) -> Iterable[UploadResult]:
114
- to_export = self._prepare_exporters(rules)
153
+ schema = self.export(rules)
115
154
 
116
155
  # we need to reverse order in which we are picking up the items to delete
117
156
  # as they are sorted in the order of creation and we need to delete them in reverse order
118
- for items, loader in reversed(to_export):
119
- if skip_space and isinstance(items, SpaceApplyList):
120
- continue
157
+ for loader in reversed(client.loaders.by_dependency_order(self.export_components)):
158
+ items = loader.items_from_schema(schema)
121
159
  item_ids = loader.get_ids(items)
122
160
  existing_items = loader.retrieve(item_ids)
123
- existing_ids = loader.get_ids(existing_items)
161
+ existing_ids = set(loader.get_ids(existing_items))
124
162
  to_delete: list[Hashable] = []
125
163
  for item_id in item_ids:
126
164
  if (
@@ -133,168 +171,132 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
133
171
  if item_id in existing_ids:
134
172
  to_delete.append(item_id)
135
173
 
136
- deleted: set[Hashable] = set()
137
- failed_deleted: set[Hashable] = set()
138
- error_messages: list[str] = []
174
+ result = UploadResult(loader.resource_name) # type: ignore[var-annotated]
139
175
  if dry_run:
140
- deleted.update(to_delete)
141
- elif to_delete:
176
+ result.deleted.update(to_delete)
177
+ yield result
178
+ continue
179
+
180
+ if to_delete:
142
181
  try:
143
- loader.delete(to_delete)
144
- except CogniteAPIError as e:
145
- failed_deleted.update(loader.get_id(item) for item in e.failed + e.unknown)
146
- deleted.update(loader.get_id(item) for item in e.successful)
147
- error_messages.append(f"Failed delete: {e.message}")
182
+ deleted = loader.delete(to_delete)
183
+ except MultiCogniteAPIError as e:
184
+ result.deleted.update([loader.get_id(item) for item in e.success])
185
+ result.failed_deleted.update([loader.get_id(item) for item in e.failed])
186
+ for error in e.errors:
187
+ result.error_messages.append(f"Failed to delete {loader.resource_name}: {error!s}")
148
188
  else:
149
- deleted.update(to_delete)
150
-
151
- yield UploadResult(
152
- name=loader.resource_name,
153
- deleted=deleted,
154
- failed_deleted=failed_deleted,
155
- error_messages=error_messages,
156
- )
189
+ result.deleted.update(deleted)
190
+ yield result
157
191
 
158
192
  def export_to_cdf_iterable(
159
- self, rules: DMSRules, client: NeatClient, dry_run: bool = False, fallback_one_by_one: bool = False
193
+ self, rules: DMSRules, client: NeatClient, dry_run: bool = False
160
194
  ) -> Iterable[UploadResult]:
161
- to_export = self._prepare_exporters(rules)
195
+ schema = self.export(rules)
162
196
 
163
- result_by_name = {}
164
- if self.existing_handling == "force":
165
- for delete_result in self.delete_from_cdf(rules, client, dry_run, skip_space=True):
166
- result_by_name[delete_result.name] = delete_result
197
+ categorized_items_by_loader = self._categorize_by_loader(client, schema)
167
198
 
168
- redeploy_data_model = False
169
- for items in to_export:
170
- # The conversion from DMS to GraphQL does not seem to be triggered even if the views
171
- # are changed. This is a workaround to force the conversion.
172
- is_redeploying = isinstance(items, DataModelApplyList) and redeploy_data_model
173
- loader = client.loaders.get_loader(items)
174
-
175
- to_create, to_delete, to_update, unchanged = self._categorize_items_for_upload(
176
- loader, items, is_redeploying
177
- )
199
+ is_failing = self.existing == "fail" and any(
200
+ loader.resource_name for loader, categorized in categorized_items_by_loader.items() if categorized.to_update
201
+ )
178
202
 
203
+ for loader, items in categorized_items_by_loader.items():
179
204
  issue_list = IssueList()
180
- warning_list = self._validate(loader, items, client)
181
- issue_list.extend(warning_list)
182
-
183
- created: set[Hashable] = set()
184
- skipped: set[Hashable] = set()
185
- changed: set[Hashable] = set()
186
- deleted: set[Hashable] = set()
187
- failed_created: set[Hashable] = set()
188
- failed_changed: set[Hashable] = set()
189
- failed_deleted: set[Hashable] = set()
190
- error_messages: list[str] = []
205
+
206
+ if items.resource_name == client.loaders.data_models.resource_name:
207
+ warning_list = self._validate(list(items.item_ids()), client)
208
+ issue_list.extend(warning_list)
209
+
210
+ results = UploadResult(loader.resource_name, issues=issue_list) # type: ignore[var-annotated]
211
+ if is_failing:
212
+ # If any component already exists, the export will fail.
213
+ # This is the same if we run dry_run or not.
214
+ results.failed_upserted.update(items.to_update_ids)
215
+ results.failed_created.update(items.to_create_ids)
216
+ results.failed_deleted.update(items.to_delete_ids)
217
+ results.unchanged.update(items.unchanged_ids)
218
+ results.error_messages.append("Existing components found and existing_handling is 'fail'")
219
+ yield results
220
+ continue
221
+
222
+ results.unchanged.update(items.unchanged_ids)
223
+ results.skipped.update(items.to_skip_ids)
191
224
  if dry_run:
192
- if self.existing_handling in ["update", "force"]:
193
- changed.update(loader.get_id(item) for item in to_update)
194
- elif self.existing_handling == "skip":
195
- skipped.update(loader.get_id(item) for item in to_update)
196
- elif self.existing_handling == "fail":
197
- failed_changed.update(loader.get_id(item) for item in to_update)
225
+ if self.existing in ["update", "force"]:
226
+ # Assume all changed are successful
227
+ results.changed.update(items.to_update_ids)
228
+ elif self.existing == "skip":
229
+ results.skipped.update(items.to_update_ids)
230
+ results.deleted.update(items.to_delete_ids)
231
+ results.created.update(items.to_create_ids)
232
+ yield results
233
+ continue
234
+
235
+ if items.to_delete_ids:
236
+ try:
237
+ deleted = loader.delete(items.to_delete_ids)
238
+ except MultiCogniteAPIError as e:
239
+ results.deleted.update([loader.get_id(item) for item in e.success])
240
+ results.failed_deleted.update([loader.get_id(item) for item in e.failed])
241
+ for error in e.errors:
242
+ results.error_messages.append(f"Failed to delete {loader.resource_name}: {error!s}")
198
243
  else:
199
- raise ValueError(f"Unsupported existing_handling {self.existing_handling}")
200
- created.update(loader.get_id(item) for item in to_create)
201
- deleted.update(loader.get_id(item) for item in to_delete)
202
- else:
203
- if to_delete:
204
- try:
205
- loader.delete(to_delete)
206
- except CogniteAPIError as e:
207
- if fallback_one_by_one:
208
- for item in to_delete:
209
- try:
210
- loader.delete([item])
211
- except CogniteAPIError as item_e:
212
- failed_deleted.add(loader.get_id(item))
213
- error_messages.append(f"Failed delete: {item_e!s}")
214
- else:
215
- deleted.add(loader.get_id(item))
216
- else:
217
- error_messages.append(f"Failed delete: {e!s}")
218
- failed_deleted.update(loader.get_id(item) for item in e.failed + e.unknown)
219
- else:
220
- deleted.update(loader.get_id(item) for item in to_delete)
221
-
222
- if isinstance(items, DataModelApplyList):
223
- to_create = loader.sort_by_dependencies(to_create)
244
+ results.deleted.update(deleted)
224
245
 
246
+ if items.to_create:
225
247
  try:
226
- loader.create(to_create)
227
- except CogniteAPIError as e:
228
- if fallback_one_by_one:
229
- for item in to_create:
230
- try:
231
- loader.create([item])
232
- except CogniteAPIError as item_e:
233
- failed_created.add(loader.get_id(item))
234
- error_messages.append(f"Failed create: {item_e!s}")
235
- else:
236
- created.add(loader.get_id(item))
237
- else:
238
- failed_created.update(loader.get_id(item) for item in e.failed + e.unknown)
239
- created.update(loader.get_id(item) for item in e.successful)
240
- error_messages.append(f"Failed create: {e!s}")
248
+ created = loader.create(items.to_create)
249
+ except MultiCogniteAPIError as e:
250
+ results.created.update([loader.get_id(item) for item in e.success])
251
+ results.failed_created.update([loader.get_id(item) for item in e.failed])
252
+ for error in e.errors:
253
+ results.error_messages.append(f"Failed to create {loader.resource_name}: {error!s}")
241
254
  else:
242
- created.update(loader.get_id(item) for item in to_create)
243
-
244
- if self.existing_handling in ["update", "force"]:
245
- try:
246
- loader.update(to_update)
247
- except CogniteAPIError as e:
248
- if fallback_one_by_one:
249
- for item in to_update:
250
- try:
251
- loader.update([item])
252
- except CogniteAPIError as e_item:
253
- failed_changed.add(loader.get_id(item))
254
- error_messages.append(f"Failed update: {e_item!s}")
255
- else:
256
- changed.add(loader.get_id(item))
257
- else:
258
- failed_changed.update(loader.get_id(item) for item in e.failed + e.unknown)
259
- changed.update(loader.get_id(item) for item in e.successful)
260
- error_messages.append(f"Failed update: {e!s}")
261
- else:
262
- changed.update(loader.get_id(item) for item in to_update)
263
- elif self.existing_handling == "skip":
264
- skipped.update(loader.get_id(item) for item in to_update)
265
- elif self.existing_handling == "fail":
266
- failed_changed.update(loader.get_id(item) for item in to_update)
267
-
268
- if loader.resource_name in result_by_name:
269
- delete_result = result_by_name[loader.resource_name]
270
- deleted.update(delete_result.deleted)
271
- failed_deleted.update(delete_result.failed_deleted)
272
- error_messages.extend(delete_result.error_messages)
273
-
274
- yield UploadResult(
275
- name=loader.resource_name,
276
- created=created,
277
- changed=changed,
278
- deleted=deleted,
279
- unchanged={loader.get_id(item) for item in unchanged},
280
- skipped=skipped,
281
- failed_created=failed_created,
282
- failed_changed=failed_changed,
283
- failed_deleted=failed_deleted,
284
- error_messages=error_messages,
285
- issues=issue_list,
286
- )
255
+ results.created.update(loader.get_ids(created))
256
+
257
+ if items.to_update and self.existing == "skip":
258
+ results.skipped.update(items.to_update_ids)
259
+ elif items.to_update:
260
+ try:
261
+ updated = loader.update(items.to_update, force=self.existing == "force", drop_data=self.drop_data)
262
+ except MultiCogniteAPIError as e:
263
+ results.changed.update([loader.get_id(item) for item in e.success])
264
+ results.failed_changed.update([loader.get_id(item) for item in e.failed])
265
+ for error in e.errors:
266
+ results.error_messages.append(f"Failed to update {loader.resource_name}: {error!s}")
267
+ else:
268
+ results.changed.update(loader.get_ids(updated))
269
+
270
+ yield results
287
271
 
288
- if isinstance(items, ViewApplyList) and (created or changed):
272
+ def _categorize_by_loader(self, client: NeatClient, schema: DMSSchema) -> dict[DataModelingLoader, ItemCategorized]:
273
+ categorized_items_by_loader: dict[DataModelingLoader, ItemCategorized] = {}
274
+ redeploy_data_model = False
275
+ for loader in client.loaders.by_dependency_order(self.export_components):
276
+ items = loader.items_from_schema(schema)
277
+ # The conversion from DMS to GraphQL does not seem to be triggered even if the views
278
+ # are changed. This is a workaround to force the conversion.
279
+ is_redeploying = isinstance(items, DataModelApplyList) and redeploy_data_model
280
+
281
+ categorized = self._categorize_items_for_upload(loader, items, is_redeploying)
282
+ categorized_items_by_loader[loader] = categorized
283
+
284
+ if isinstance(items, ViewApplyList) and (categorized.to_create or categorized.to_update):
289
285
  redeploy_data_model = True
286
+ return categorized_items_by_loader
290
287
 
291
288
  def _categorize_items_for_upload(
292
- self, loader: DataModelingLoader, items: Sequence[CogniteResource], is_redeploying
293
- ) -> tuple[list[CogniteResource], list[CogniteResource], list[CogniteResource], list[CogniteResource]]:
289
+ self,
290
+ loader: DataModelingLoader[
291
+ T_ID, T_WriteClass, T_WritableCogniteResource, T_CogniteResourceList, T_WritableCogniteResourceList
292
+ ],
293
+ items: T_CogniteResourceList,
294
+ is_redeploying: bool,
295
+ ) -> ItemCategorized[T_ID, T_WriteClass]:
294
296
  item_ids = loader.get_ids(items)
295
297
  cdf_items = loader.retrieve(item_ids)
296
298
  cdf_item_by_id = {loader.get_id(item): item for item in cdf_items}
297
- to_create, to_update, unchanged, to_delete = [], [], [], []
299
+ categorized = ItemCategorized[T_ID, T_WriteClass](loader.resource_name, loader.get_id)
298
300
  for item in items:
299
301
  if (
300
302
  isinstance(items, DataModelApplyList)
@@ -305,50 +307,38 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
305
307
 
306
308
  cdf_item = cdf_item_by_id.get(loader.get_id(item))
307
309
  if cdf_item is None:
308
- to_create.append(item)
309
- elif is_redeploying:
310
- to_update.append(item)
311
- to_delete.append(cdf_item)
310
+ categorized.to_create.append(item)
311
+ elif is_redeploying or self.existing == "recreate":
312
+ if loader.has_data(cdf_item) and not self.drop_data:
313
+ categorized.to_skip.append(cdf_item)
314
+ else:
315
+ categorized.to_delete.append(cdf_item.as_write())
316
+ categorized.to_create.append(item)
312
317
  elif loader.are_equal(item, cdf_item):
313
- unchanged.append(item)
318
+ categorized.unchanged.append(item)
314
319
  else:
315
- to_update.append(item)
316
- return to_create, to_delete, to_update, unchanged
320
+ categorized.to_update.append(item)
321
+ return categorized
317
322
 
318
- def _prepare_exporters(self, rules: DMSRules) -> list[CogniteResourceList]:
319
- schema = self.export(rules)
320
- to_export: list[CogniteResourceList] = []
321
- if self.export_components.intersection({"all", "spaces"}):
322
- to_export.append(SpaceApplyList(schema.spaces.values()))
323
- if self.export_components.intersection({"all", "containers"}):
324
- to_export.append(ContainerApplyList(schema.containers.values()))
325
- if self.export_components.intersection({"all", "views"}):
326
- to_export.append(ViewApplyList(schema.views.values()))
327
- if self.export_components.intersection({"all", "data_models"}):
328
- to_export.append(DataModelApplyList([schema.data_model]))
329
- return to_export
330
-
331
- def _validate(self, loader: DataModelingLoader, items: CogniteResourceList, client: NeatClient) -> IssueList:
323
+ def _validate(self, items: list[DataModelId], client: NeatClient) -> IssueList:
332
324
  issue_list = IssueList()
333
- if isinstance(items, DataModelApplyList):
334
- models = cast(list[DataModelApply], items)
335
- if other_models := self._exist_other_data_models(client, models):
336
- warning = PrincipleOneModelOneSpaceWarning(
337
- f"There are multiple data models in the same space {models[0].space}. "
338
- f"Other data models in the space are {other_models}.",
339
- )
340
- if not self.suppress_warnings:
341
- warnings.warn(warning, stacklevel=2)
342
- issue_list.append(warning)
325
+ if other_models := self._exist_other_data_models(client, items):
326
+ warning = PrincipleOneModelOneSpaceWarning(
327
+ f"There are multiple data models in the same space {items[0].space}. "
328
+ f"Other data models in the space are {other_models}.",
329
+ )
330
+ if not self.suppress_warnings:
331
+ warnings.warn(warning, stacklevel=2)
332
+ issue_list.append(warning)
343
333
 
344
334
  return issue_list
345
335
 
346
336
  @classmethod
347
- def _exist_other_data_models(cls, client: NeatClient, models: list[DataModelApply]) -> list[DataModelId]:
348
- if not models:
337
+ def _exist_other_data_models(cls, client: NeatClient, model_ids: list[DataModelId]) -> list[DataModelId]:
338
+ if not model_ids:
349
339
  return []
350
- space = models[0].space
351
- external_id = models[0].external_id
340
+ space = model_ids[0].space
341
+ external_id = model_ids[0].external_id
352
342
  try:
353
343
  data_models = client.data_modeling.data_models.list(space=space, limit=25, all_versions=False)
354
344
  except CogniteAPIError as e:
@@ -1,9 +1,7 @@
1
1
  from ._base import BaseImporter
2
2
  from ._dms2rules import DMSImporter
3
3
  from ._dtdl2rules import DTDLImporter
4
- from ._rdf._imf2rules import IMFImporter
5
- from ._rdf._inference2rules import InferenceImporter
6
- from ._rdf._owl2rules import OWLImporter
4
+ from ._rdf import IMFImporter, InferenceImporter, OWLImporter
7
5
  from ._spreadsheet2rules import ExcelImporter, GoogleSheetImporter
8
6
  from ._yaml2rules import YAMLImporter
9
7
 
@@ -85,7 +85,7 @@ def _handle_issues(
85
85
  try:
86
86
  yield future_result
87
87
  except ValidationError as e:
88
- issues.extend(error_cls.from_pydantic_errors(e.errors(), **(error_args or {})))
88
+ issues.extend(error_cls.from_errors(e.errors(), **(error_args or {}))) # type: ignore[arg-type]
89
89
  future_result._result = "failure"
90
90
  else:
91
91
  future_result._result = "success"
@@ -80,9 +80,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
80
80
  self.issue_list = IssueList(read_issues)
81
81
  self._all_containers_by_id = schema.containers.copy()
82
82
  self._all_views_by_id = schema.views.copy()
83
- if schema.reference:
84
- self._all_containers_by_id.update(schema.reference.containers.items())
85
- self._all_views_by_id.update(schema.reference.views.items())
86
83
 
87
84
  def update_referenced_containers(self, containers: Iterable[dm.ContainerApply]) -> None:
88
85
  """Update the referenced containers. This is useful to add Cognite containers identified after the root schema
@@ -97,7 +94,6 @@ class DMSImporter(BaseImporter[DMSInputRules]):
97
94
  cls,
98
95
  client: NeatClient,
99
96
  data_model_id: DataModelIdentifier,
100
- reference_model_id: DataModelIdentifier | None = None,
101
97
  ) -> "DMSImporter":
102
98
  """Create a DMSImporter ready to convert the given data model to rules.
103
99
 
@@ -111,7 +107,7 @@ class DMSImporter(BaseImporter[DMSInputRules]):
111
107
  DMSImporter: DMSImporter instance
112
108
  """
113
109
 
114
- data_model_ids = [data_model_id, reference_model_id] if reference_model_id else [data_model_id]
110
+ data_model_ids = [data_model_id]
115
111
  data_models = client.data_modeling.data_models.retrieve(data_model_ids, inline_views=True)
116
112
 
117
113
  user_models = cls._find_model_in_list(data_models, data_model_id)
@@ -128,34 +124,16 @@ class DMSImporter(BaseImporter[DMSInputRules]):
128
124
  )
129
125
  user_model = user_models.latest_version()
130
126
 
131
- if reference_model_id:
132
- ref_models = cls._find_model_in_list(data_models, reference_model_id)
133
- if len(ref_models) == 0:
134
- return cls(
135
- DMSSchema(),
136
- [
137
- ResourceRetrievalError(
138
- dm.DataModelId.load(reference_model_id),
139
- "data model",
140
- "Data Model is missing in CDF",
141
- )
142
- ],
143
- )
144
- ref_model: dm.DataModel[dm.View] | None = ref_models.latest_version()
145
- else:
146
- ref_model = None
147
-
148
127
  issue_list = IssueList()
149
128
  with _handle_issues(issue_list) as result:
150
- schema = DMSSchema.from_data_model(NeatClient(client), user_model, ref_model)
129
+ schema = NeatClient(client).schema.retrieve_data_model(user_model)
151
130
 
152
131
  if result.result == "failure" or issue_list.has_errors:
153
132
  return cls(DMSSchema(), issue_list)
154
133
 
155
134
  metadata = cls._create_metadata_from_model(user_model)
156
- ref_metadata = cls._create_metadata_from_model(ref_model) if ref_model else None
157
135
 
158
- return cls(schema, issue_list, metadata, ref_metadata)
136
+ return cls(schema, issue_list, metadata, None)
159
137
 
160
138
  @classmethod
161
139
  def _find_model_in_list(
@@ -0,0 +1,5 @@
1
+ from ._imf2rules import IMFImporter
2
+ from ._inference2rules import InferenceImporter
3
+ from ._owl2rules import OWLImporter
4
+
5
+ __all__ = ["IMFImporter", "OWLImporter", "InferenceImporter"]