cognite-neat 0.104.0__py3-none-any.whl → 0.105.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. See the registry's advisory page for more details.

Files changed (141)
  1. cognite/neat/_client/_api/data_modeling_loaders.py +83 -23
  2. cognite/neat/_client/_api/schema.py +2 -1
  3. cognite/neat/_client/data_classes/neat_sequence.py +261 -0
  4. cognite/neat/_client/data_classes/schema.py +5 -1
  5. cognite/neat/_client/testing.py +33 -0
  6. cognite/neat/_constants.py +56 -0
  7. cognite/neat/_graph/extractors/_classic_cdf/_base.py +6 -5
  8. cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +225 -11
  9. cognite/neat/_graph/extractors/_mock_graph_generator.py +1 -1
  10. cognite/neat/_graph/loaders/_rdf2dms.py +13 -2
  11. cognite/neat/_graph/transformers/__init__.py +3 -1
  12. cognite/neat/_graph/transformers/_classic_cdf.py +2 -1
  13. cognite/neat/_graph/transformers/_value_type.py +72 -0
  14. cognite/neat/_issues/__init__.py +0 -2
  15. cognite/neat/_issues/_base.py +19 -35
  16. cognite/neat/_issues/warnings/__init__.py +4 -1
  17. cognite/neat/_issues/warnings/_general.py +7 -0
  18. cognite/neat/_issues/warnings/_resources.py +11 -0
  19. cognite/neat/_rules/exporters/_rules2dms.py +35 -1
  20. cognite/neat/_rules/exporters/_rules2excel.py +2 -2
  21. cognite/neat/_rules/importers/_dms2rules.py +66 -55
  22. cognite/neat/_rules/models/_base_rules.py +4 -1
  23. cognite/neat/_rules/models/entities/_wrapped.py +10 -5
  24. cognite/neat/_rules/models/mapping/_classic2core.yaml +239 -38
  25. cognite/neat/_rules/transformers/__init__.py +8 -2
  26. cognite/neat/_rules/transformers/_converters.py +271 -188
  27. cognite/neat/_rules/transformers/_mapping.py +75 -59
  28. cognite/neat/_rules/transformers/_verification.py +2 -3
  29. cognite/neat/_session/_inspect.py +3 -1
  30. cognite/neat/_session/_prepare.py +112 -24
  31. cognite/neat/_session/_read.py +33 -70
  32. cognite/neat/_session/_state.py +2 -2
  33. cognite/neat/_session/_to.py +2 -2
  34. cognite/neat/_store/_rules_store.py +4 -8
  35. cognite/neat/_utils/reader/_base.py +27 -0
  36. cognite/neat/_version.py +1 -1
  37. {cognite_neat-0.104.0.dist-info → cognite_neat-0.105.0.dist-info}/METADATA +3 -2
  38. cognite_neat-0.105.0.dist-info/RECORD +179 -0
  39. {cognite_neat-0.104.0.dist-info → cognite_neat-0.105.0.dist-info}/WHEEL +1 -1
  40. cognite/neat/_app/api/__init__.py +0 -0
  41. cognite/neat/_app/api/asgi/metrics.py +0 -4
  42. cognite/neat/_app/api/configuration.py +0 -98
  43. cognite/neat/_app/api/context_manager/__init__.py +0 -3
  44. cognite/neat/_app/api/context_manager/manager.py +0 -16
  45. cognite/neat/_app/api/data_classes/__init__.py +0 -0
  46. cognite/neat/_app/api/data_classes/rest.py +0 -59
  47. cognite/neat/_app/api/explorer.py +0 -66
  48. cognite/neat/_app/api/routers/configuration.py +0 -25
  49. cognite/neat/_app/api/routers/crud.py +0 -102
  50. cognite/neat/_app/api/routers/metrics.py +0 -10
  51. cognite/neat/_app/api/routers/workflows.py +0 -224
  52. cognite/neat/_app/api/utils/__init__.py +0 -0
  53. cognite/neat/_app/api/utils/data_mapping.py +0 -17
  54. cognite/neat/_app/api/utils/logging.py +0 -26
  55. cognite/neat/_app/api/utils/query_templates.py +0 -92
  56. cognite/neat/_app/main.py +0 -17
  57. cognite/neat/_app/monitoring/__init__.py +0 -0
  58. cognite/neat/_app/monitoring/metrics.py +0 -69
  59. cognite/neat/_app/ui/index.html +0 -1
  60. cognite/neat/_app/ui/neat-app/.gitignore +0 -23
  61. cognite/neat/_app/ui/neat-app/README.md +0 -70
  62. cognite/neat/_app/ui/neat-app/build/asset-manifest.json +0 -14
  63. cognite/neat/_app/ui/neat-app/build/favicon.ico +0 -0
  64. cognite/neat/_app/ui/neat-app/build/img/architect-icon.svg +0 -116
  65. cognite/neat/_app/ui/neat-app/build/img/developer-icon.svg +0 -112
  66. cognite/neat/_app/ui/neat-app/build/img/sme-icon.svg +0 -34
  67. cognite/neat/_app/ui/neat-app/build/index.html +0 -1
  68. cognite/neat/_app/ui/neat-app/build/logo192.png +0 -0
  69. cognite/neat/_app/ui/neat-app/build/manifest.json +0 -25
  70. cognite/neat/_app/ui/neat-app/build/robots.txt +0 -3
  71. cognite/neat/_app/ui/neat-app/build/static/css/main.72e3d92e.css +0 -2
  72. cognite/neat/_app/ui/neat-app/build/static/css/main.72e3d92e.css.map +0 -1
  73. cognite/neat/_app/ui/neat-app/build/static/js/main.5a52cf09.js +0 -3
  74. cognite/neat/_app/ui/neat-app/build/static/js/main.5a52cf09.js.LICENSE.txt +0 -88
  75. cognite/neat/_app/ui/neat-app/build/static/js/main.5a52cf09.js.map +0 -1
  76. cognite/neat/_app/ui/neat-app/build/static/media/logo.8093b84df9ed36a174c629d6fe0b730d.svg +0 -1
  77. cognite/neat/_app/ui/neat-app/package-lock.json +0 -18306
  78. cognite/neat/_app/ui/neat-app/package.json +0 -62
  79. cognite/neat/_app/ui/neat-app/public/favicon.ico +0 -0
  80. cognite/neat/_app/ui/neat-app/public/img/architect-icon.svg +0 -116
  81. cognite/neat/_app/ui/neat-app/public/img/developer-icon.svg +0 -112
  82. cognite/neat/_app/ui/neat-app/public/img/sme-icon.svg +0 -34
  83. cognite/neat/_app/ui/neat-app/public/index.html +0 -43
  84. cognite/neat/_app/ui/neat-app/public/logo192.png +0 -0
  85. cognite/neat/_app/ui/neat-app/public/manifest.json +0 -25
  86. cognite/neat/_app/ui/neat-app/public/robots.txt +0 -3
  87. cognite/neat/_app/ui/neat-app/src/App.css +0 -38
  88. cognite/neat/_app/ui/neat-app/src/App.js +0 -17
  89. cognite/neat/_app/ui/neat-app/src/App.test.js +0 -8
  90. cognite/neat/_app/ui/neat-app/src/MainContainer.tsx +0 -70
  91. cognite/neat/_app/ui/neat-app/src/components/JsonViewer.tsx +0 -43
  92. cognite/neat/_app/ui/neat-app/src/components/LocalUploader.tsx +0 -124
  93. cognite/neat/_app/ui/neat-app/src/components/OverviewComponentEditorDialog.tsx +0 -63
  94. cognite/neat/_app/ui/neat-app/src/components/StepEditorDialog.tsx +0 -511
  95. cognite/neat/_app/ui/neat-app/src/components/TabPanel.tsx +0 -36
  96. cognite/neat/_app/ui/neat-app/src/components/Utils.tsx +0 -56
  97. cognite/neat/_app/ui/neat-app/src/components/WorkflowDeleteDialog.tsx +0 -60
  98. cognite/neat/_app/ui/neat-app/src/components/WorkflowExecutionReport.tsx +0 -112
  99. cognite/neat/_app/ui/neat-app/src/components/WorkflowImportExportDialog.tsx +0 -67
  100. cognite/neat/_app/ui/neat-app/src/components/WorkflowMetadataDialog.tsx +0 -79
  101. cognite/neat/_app/ui/neat-app/src/index.css +0 -13
  102. cognite/neat/_app/ui/neat-app/src/index.js +0 -13
  103. cognite/neat/_app/ui/neat-app/src/logo.svg +0 -1
  104. cognite/neat/_app/ui/neat-app/src/reportWebVitals.js +0 -13
  105. cognite/neat/_app/ui/neat-app/src/setupTests.js +0 -5
  106. cognite/neat/_app/ui/neat-app/src/types/WorkflowTypes.ts +0 -388
  107. cognite/neat/_app/ui/neat-app/src/views/AboutView.tsx +0 -61
  108. cognite/neat/_app/ui/neat-app/src/views/ConfigView.tsx +0 -184
  109. cognite/neat/_app/ui/neat-app/src/views/GlobalConfigView.tsx +0 -180
  110. cognite/neat/_app/ui/neat-app/src/views/WorkflowView.tsx +0 -570
  111. cognite/neat/_app/ui/neat-app/tsconfig.json +0 -27
  112. cognite/neat/_workflows/__init__.py +0 -17
  113. cognite/neat/_workflows/base.py +0 -590
  114. cognite/neat/_workflows/cdf_store.py +0 -393
  115. cognite/neat/_workflows/examples/Export_DMS/workflow.yaml +0 -89
  116. cognite/neat/_workflows/examples/Export_Semantic_Data_Model/workflow.yaml +0 -66
  117. cognite/neat/_workflows/examples/Import_DMS/workflow.yaml +0 -65
  118. cognite/neat/_workflows/examples/Validate_Rules/workflow.yaml +0 -67
  119. cognite/neat/_workflows/examples/Validate_Solution_Model/workflow.yaml +0 -64
  120. cognite/neat/_workflows/manager.py +0 -292
  121. cognite/neat/_workflows/model.py +0 -203
  122. cognite/neat/_workflows/steps/__init__.py +0 -0
  123. cognite/neat/_workflows/steps/data_contracts.py +0 -109
  124. cognite/neat/_workflows/steps/lib/__init__.py +0 -0
  125. cognite/neat/_workflows/steps/lib/current/__init__.py +0 -6
  126. cognite/neat/_workflows/steps/lib/current/graph_extractor.py +0 -100
  127. cognite/neat/_workflows/steps/lib/current/graph_loader.py +0 -51
  128. cognite/neat/_workflows/steps/lib/current/graph_store.py +0 -48
  129. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +0 -537
  130. cognite/neat/_workflows/steps/lib/current/rules_importer.py +0 -323
  131. cognite/neat/_workflows/steps/lib/current/rules_validator.py +0 -106
  132. cognite/neat/_workflows/steps/lib/io/__init__.py +0 -1
  133. cognite/neat/_workflows/steps/lib/io/io_steps.py +0 -393
  134. cognite/neat/_workflows/steps/step_model.py +0 -79
  135. cognite/neat/_workflows/steps_registry.py +0 -218
  136. cognite/neat/_workflows/tasks.py +0 -18
  137. cognite/neat/_workflows/triggers.py +0 -169
  138. cognite/neat/_workflows/utils.py +0 -19
  139. cognite_neat-0.104.0.dist-info/RECORD +0 -276
  140. {cognite_neat-0.104.0.dist-info → cognite_neat-0.105.0.dist-info}/LICENSE +0 -0
  141. {cognite_neat-0.104.0.dist-info → cognite_neat-0.105.0.dist-info}/entry_points.txt +0 -0
@@ -1,37 +1,251 @@
1
- from collections.abc import Iterable
1
+ import itertools
2
+ import json
3
+ from collections.abc import Callable, Iterable, Set
2
4
  from pathlib import Path
5
+ from typing import Any
3
6
 
4
7
  from cognite.client import CogniteClient
5
- from cognite.client.data_classes import Sequence, SequenceFilter, SequenceList
8
+ from cognite.client.data_classes import Sequence, SequenceFilter
9
+ from rdflib import RDF, XSD, Literal, Namespace, URIRef
6
10
 
7
- from ._base import ClassicCDFBaseExtractor, InstanceIdPrefix
11
+ from cognite.neat._client.data_classes.neat_sequence import NeatSequence, NeatSequenceList
12
+ from cognite.neat._shared import Triple
8
13
 
14
+ from ._base import DEFAULT_SKIP_METADATA_VALUES, ClassicCDFBaseExtractor, InstanceIdPrefix
9
15
 
10
- class SequencesExtractor(ClassicCDFBaseExtractor[Sequence]):
11
- """Extract data from Cognite Data Fusions Sequences into Neat."""
16
+
17
+ class SequencesExtractor(ClassicCDFBaseExtractor[NeatSequence]):
18
+ """Extract data from Cognite Data Fusions Sequences into Neat.
19
+
20
+ Args:
21
+ items (Iterable[T_CogniteResource]): An iterable of classic resource.
22
+ namespace (Namespace, optional): The namespace to use. Defaults to DEFAULT_NAMESPACE.
23
+ to_type (Callable[[T_CogniteResource], str | None], optional): A function to convert an item to a type.
24
+ Defaults to None. If None or if the function returns None, the asset will be set to the default type.
25
+ total (int, optional): The total number of items to load. If passed, you will get a progress bar if rich
26
+ is installed. Defaults to None.
27
+ limit (int, optional): The maximal number of items to load. Defaults to None. This is typically used for
28
+ testing setup of the extractor. For example, if you are extracting 100 000 assets, you might want to
29
+ limit the extraction to 1000 assets to test the setup.
30
+ unpack_metadata (bool, optional): Whether to unpack metadata. Defaults to False, which yields the metadata as
31
+ a JSON string.
32
+ skip_metadata_values (set[str] | frozenset[str] | None, optional): If you are unpacking metadata, then
33
+ values in this set will be skipped.
34
+ camel_case (bool, optional): Whether to use camelCase instead of snake_case for property names.
35
+ Defaults to True.
36
+ as_write (bool, optional): Whether to use the write/request format of the items. Defaults to False.
37
+ unpack_columns (bool, optional): Whether to unpack columns. Defaults to False.
38
+ """
12
39
 
13
40
  _default_rdf_type = "Sequence"
41
+ _column_rdf_type = "ColumnClass"
14
42
  _instance_id_prefix = InstanceIdPrefix.sequence
15
43
 
44
+ def __init__(
45
+ self,
46
+ items: Iterable[NeatSequence],
47
+ namespace: Namespace | None = None,
48
+ to_type: Callable[[NeatSequence], str | None] | None = None,
49
+ total: int | None = None,
50
+ limit: int | None = None,
51
+ unpack_metadata: bool = True,
52
+ skip_metadata_values: Set[str] | None = DEFAULT_SKIP_METADATA_VALUES,
53
+ camel_case: bool = True,
54
+ as_write: bool = False,
55
+ unpack_columns: bool = False,
56
+ ):
57
+ super().__init__(
58
+ items, namespace, to_type, total, limit, unpack_metadata, skip_metadata_values, camel_case, as_write
59
+ )
60
+ self.unpack_columns = unpack_columns
61
+
16
62
  @classmethod
17
- def _from_dataset(cls, client: CogniteClient, data_set_external_id: str) -> tuple[int | None, Iterable[Sequence]]:
63
+ def from_dataset(
64
+ cls,
65
+ client: CogniteClient,
66
+ data_set_external_id: str,
67
+ namespace: Namespace | None = None,
68
+ to_type: Callable[[NeatSequence], str | None] | None = None,
69
+ limit: int | None = None,
70
+ unpack_metadata: bool = True,
71
+ skip_metadata_values: Set[str] | None = DEFAULT_SKIP_METADATA_VALUES,
72
+ camel_case: bool = True,
73
+ as_write: bool = False,
74
+ unpack_columns: bool = False,
75
+ ):
76
+ total, items = cls._from_dataset(client, data_set_external_id)
77
+ return cls(
78
+ items,
79
+ namespace,
80
+ to_type,
81
+ total,
82
+ limit,
83
+ unpack_metadata,
84
+ skip_metadata_values,
85
+ camel_case,
86
+ as_write,
87
+ unpack_columns,
88
+ )
89
+
90
+ @classmethod
91
+ def from_hierarchy(
92
+ cls,
93
+ client: CogniteClient,
94
+ root_asset_external_id: str,
95
+ namespace: Namespace | None = None,
96
+ to_type: Callable[[NeatSequence], str | None] | None = None,
97
+ limit: int | None = None,
98
+ unpack_metadata: bool = True,
99
+ skip_metadata_values: Set[str] | None = DEFAULT_SKIP_METADATA_VALUES,
100
+ camel_case: bool = True,
101
+ as_write: bool = False,
102
+ unpack_columns: bool = False,
103
+ ):
104
+ total, items = cls._from_hierarchy(client, root_asset_external_id)
105
+ return cls(
106
+ items,
107
+ namespace,
108
+ to_type,
109
+ total,
110
+ limit,
111
+ unpack_metadata,
112
+ skip_metadata_values,
113
+ camel_case,
114
+ as_write,
115
+ unpack_columns,
116
+ )
117
+
118
+ @classmethod
119
+ def from_file(
120
+ cls,
121
+ file_path: str | Path,
122
+ namespace: Namespace | None = None,
123
+ to_type: Callable[[NeatSequence], str | None] | None = None,
124
+ limit: int | None = None,
125
+ unpack_metadata: bool = True,
126
+ skip_metadata_values: Set[str] | None = DEFAULT_SKIP_METADATA_VALUES,
127
+ camel_case: bool = True,
128
+ as_write: bool = False,
129
+ unpack_columns: bool = False,
130
+ ):
131
+ total, items = cls._from_file(file_path)
132
+ return cls(
133
+ items,
134
+ namespace,
135
+ to_type,
136
+ total,
137
+ limit,
138
+ unpack_metadata,
139
+ skip_metadata_values,
140
+ camel_case,
141
+ as_write,
142
+ unpack_columns,
143
+ )
144
+
145
+ @classmethod
146
+ def _from_dataset(
147
+ cls, client: CogniteClient, data_set_external_id: str
148
+ ) -> tuple[int | None, Iterable[NeatSequence]]:
18
149
  total = client.sequences.aggregate_count(
19
150
  filter=SequenceFilter(data_set_ids=[{"externalId": data_set_external_id}])
20
151
  )
21
152
  items = client.sequences(data_set_external_ids=data_set_external_id)
22
- return total, items
153
+ return total, cls._lookup_rows(items, client)
23
154
 
24
155
  @classmethod
25
156
  def _from_hierarchy(
26
157
  cls, client: CogniteClient, root_asset_external_id: str
27
- ) -> tuple[int | None, Iterable[Sequence]]:
158
+ ) -> tuple[int | None, Iterable[NeatSequence]]:
28
159
  total = client.sequences.aggregate_count(
29
160
  filter=SequenceFilter(asset_subtree_ids=[{"externalId": root_asset_external_id}])
30
161
  )
31
162
  items = client.sequences(asset_subtree_external_ids=[root_asset_external_id])
32
- return total, items
163
+ return total, cls._lookup_rows(items, client)
33
164
 
34
165
  @classmethod
35
- def _from_file(cls, file_path: str | Path) -> tuple[int | None, Iterable[Sequence]]:
36
- sequences = SequenceList.load(Path(file_path).read_text())
166
+ def _from_file(cls, file_path: str | Path) -> tuple[int | None, Iterable[NeatSequence]]:
167
+ sequences = NeatSequenceList.load(Path(file_path).read_text())
37
168
  return len(sequences), sequences
169
+
170
+ @classmethod
171
+ def _lookup_rows(cls, sequence_iterable: Iterable[Sequence], client: CogniteClient) -> Iterable[NeatSequence]:
172
+ iterator = iter(sequence_iterable)
173
+ for sequences in iter(lambda: list(itertools.islice(iterator, client.config.max_workers)), []):
174
+ # The PySDK uses max_workers to limit the number of requests made in parallel.
175
+ # We can only get one set of sequence rows per request, so we chunk the sequences up into groups of
176
+ # max_workers and then make a request to get all the rows for those sequences in one go.
177
+ sequence_list = list(sequences)
178
+ row_list = client.sequences.rows.retrieve(id=[seq.id for seq in sequence_list])
179
+ rows_by_sequence_id = {row.id: row.rows for row in row_list}
180
+ for seq in sequence_list:
181
+ yield NeatSequence.from_cognite_sequence(seq, rows_by_sequence_id.get(seq.id))
182
+
183
+ def _item2triples_special_cases(self, id_: URIRef, dumped: dict[str, Any]) -> list[Triple]:
184
+ """For sequences, columns and rows are special cases.'"""
185
+ if self.unpack_columns:
186
+ return self._unpack_columns(id_, dumped)
187
+ else:
188
+ return self._default_columns_and_rows(id_, dumped)
189
+
190
+ def _default_columns_and_rows(self, id_: URIRef, dumped: dict[str, Any]) -> list[Triple]:
191
+ triples: list[Triple] = []
192
+ if "columns" in dumped:
193
+ columns = dumped.pop("columns")
194
+ triples.extend(
195
+ [
196
+ (
197
+ id_,
198
+ self.namespace.columns,
199
+ # Rows have a rowNumber, so we introduce colNumber here to be consistent.
200
+ Literal(json.dumps({"colNumber": no, **col}), datatype=XSD._NS["json"]),
201
+ )
202
+ for no, col in enumerate(columns, 1)
203
+ ]
204
+ )
205
+ if "rows" in dumped:
206
+ rows = dumped.pop("rows")
207
+ triples.extend(
208
+ [(id_, self.namespace.rows, Literal(json.dumps(row), datatype=XSD._NS["json"])) for row in rows]
209
+ )
210
+ return triples
211
+
212
+ def _unpack_columns(self, id_: URIRef, dumped: dict[str, Any]) -> list[Triple]:
213
+ triples: list[Triple] = []
214
+ columnValueTypes: list[str] = []
215
+ column_order: list[str] = []
216
+ if columns := dumped.pop("columns", None):
217
+ for col in columns:
218
+ external_id = col.pop("externalId")
219
+ column_order.append(external_id)
220
+ value_type = col.pop("valueType")
221
+ columnValueTypes.append(value_type)
222
+
223
+ col_id = self.namespace[f"Column_{external_id}"]
224
+ triples.append((id_, self.namespace[external_id], col_id))
225
+ type_ = self.namespace[self._column_rdf_type]
226
+ triples.append((col_id, RDF.type, type_))
227
+ if metadata := col.pop("metadata", None):
228
+ triples.extend(self._metadata_to_triples(col_id, metadata))
229
+ # Should only be name and description left in col
230
+ for key, value in col.items():
231
+ if value is None:
232
+ continue
233
+ triples.append((col_id, self.namespace[key], Literal(value, datatype=XSD.string)))
234
+
235
+ triples.append(
236
+ (id_, self.namespace.columnOrder, Literal(json.dumps(column_order), datatype=XSD._NS["json"]))
237
+ )
238
+ triples.append(
239
+ (id_, self.namespace.columnValueTypes, Literal(json.dumps(columnValueTypes), datatype=XSD._NS["json"]))
240
+ )
241
+ if rows := dumped.pop("rows", None):
242
+ values_by_column: list[list[Any]] = [[] for _ in column_order]
243
+ for row in rows:
244
+ for i, value in enumerate(row["values"]):
245
+ values_by_column[i].append(value)
246
+ for col_name, values in zip(column_order, values_by_column, strict=False):
247
+ triples.append(
248
+ (id_, self.namespace[f"{col_name}Values"], Literal(json.dumps(values), datatype=XSD._NS["json"]))
249
+ )
250
+
251
+ return triples
@@ -183,7 +183,7 @@ def _get_generation_order(
183
183
  parent_col: str = "source_class",
184
184
  child_col: str = "target_class",
185
185
  ) -> dict:
186
- parent_child_list: list[list[str]] = class_linkage[[parent_col, child_col]].values.tolist()
186
+ parent_child_list: list[list[str]] = class_linkage[[parent_col, child_col]].values.tolist() # type: ignore[assignment]
187
187
  # Build a directed graph and a list of all names that have no parent
188
188
  graph: dict[str, set] = {name: set() for tup in parent_child_list for name in tup}
189
189
  has_parent: dict[str, bool] = {name: False for tup in parent_child_list for name in tup}
@@ -19,6 +19,7 @@ from pydantic import BaseModel, ValidationInfo, create_model, field_validator
19
19
  from rdflib import RDF, URIRef
20
20
 
21
21
  from cognite.neat._client import NeatClient
22
+ from cognite.neat._constants import is_readonly_property
22
23
  from cognite.neat._graph._tracking import LogTracker, Tracker
23
24
  from cognite.neat._issues import IssueList, NeatIssue, NeatIssueList
24
25
  from cognite.neat._issues.errors import (
@@ -303,6 +304,9 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
303
304
  if isinstance(prop, dm.EdgeConnection):
304
305
  edge_by_property[prop_id] = prop_id, prop
305
306
  if isinstance(prop, dm.MappedProperty):
307
+ if is_readonly_property(prop.container, prop.container_property_identifier):
308
+ continue
309
+
306
310
  if isinstance(prop.type, dm.DirectRelation):
307
311
  if prop.container == dm.ContainerId("cdf_cdm", "CogniteTimeSeries") and prop_id == "unit":
308
312
  unit_properties.append(prop_id)
@@ -343,9 +347,14 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
343
347
 
344
348
  return value
345
349
 
346
- def parse_json_string(cls, value: Any, info: ValidationInfo) -> dict:
350
+ def parse_json_string(cls, value: Any, info: ValidationInfo) -> dict | list:
347
351
  if isinstance(value, dict):
348
352
  return value
353
+ elif isinstance(value, list):
354
+ try:
355
+ return [json.loads(v) if isinstance(v, str) else v for v in value]
356
+ except json.JSONDecodeError as error:
357
+ raise ValueError(f"Not valid JSON string for {info.field_name}: {value}, error {error}") from error
349
358
  elif isinstance(value, str):
350
359
  try:
351
360
  return json.loads(value)
@@ -401,7 +410,9 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
401
410
  space=self.instance_space,
402
411
  external_id=identifier,
403
412
  type=(dm.DirectRelationReference(view_id.space, view_id.external_id) if type_ is not None else None),
404
- sources=[dm.NodeOrEdgeData(source=view_id, properties=dict(created.model_dump().items()))],
413
+ sources=[
414
+ dm.NodeOrEdgeData(source=view_id, properties=dict(created.model_dump(exclude_unset=True).items()))
415
+ ],
405
416
  )
406
417
 
407
418
  def _create_edges(
@@ -15,7 +15,7 @@ from ._prune_graph import (
15
15
  PruneTypes,
16
16
  )
17
17
  from ._rdfpath import AddSelfReferenceProperty, MakeConnectionOnExactMatch
18
- from ._value_type import ConvertLiteral, LiteralToEntity, SplitMultiValueProperty
18
+ from ._value_type import ConnectionToLiteral, ConvertLiteral, LiteralToEntity, SplitMultiValueProperty
19
19
 
20
20
  __all__ = [
21
21
  "AddAssetDepth",
@@ -26,6 +26,7 @@ __all__ = [
26
26
  "AssetSequenceConnector",
27
27
  "AssetTimeSeriesConnector",
28
28
  "AttachPropertyFromTargetToSource",
29
+ "ConnectionToLiteral",
29
30
  "ConvertLiteral",
30
31
  "LiteralToEntity",
31
32
  "MakeConnectionOnExactMatch",
@@ -55,4 +56,5 @@ Transformers = (
55
56
  | PruneInstancesOfUnknownType
56
57
  | ConvertLiteral
57
58
  | LiteralToEntity
59
+ | ConnectionToLiteral
58
60
  )
@@ -375,7 +375,8 @@ WHERE {{
375
375
  ) -> list[Triple]:
376
376
  relationship_triples = cast(list[Triple], list(graph.query(f"DESCRIBE <{relationship_id}>")))
377
377
  object_by_predicates = cast(
378
- dict[str, URIRef | Literal], {remove_namespace_from_uri(row[1]): row[2] for row in relationship_triples}
378
+ dict[str, URIRef | Literal],
379
+ {remove_namespace_from_uri(row[1]): row[2] for row in relationship_triples if row[1] != RDF.type},
379
380
  )
380
381
  source_external_id = cast(URIRef, object_by_predicates["sourceExternalId"])
381
382
  target_source_id = cast(URIRef, object_by_predicates["targetExternalId"])
@@ -223,3 +223,75 @@ class LiteralToEntity(BaseTransformerStandardised):
223
223
  row_output.instances_modified_count += 1 # we modify the old entity
224
224
 
225
225
  return row_output
226
+
227
+
228
+ class ConnectionToLiteral(BaseTransformerStandardised):
229
+ description = "Converts an entity connection to a literal value"
230
+
231
+ def __init__(self, subject_type: URIRef | None, subject_predicate: URIRef) -> None:
232
+ self.subject_type = subject_type
233
+ self.subject_predicate = subject_predicate
234
+
235
+ def _iterate_query(self) -> str:
236
+ if self.subject_type is None:
237
+ query = """SELECT ?instance ?object
238
+ WHERE {{
239
+ ?instance <{subject_predicate}> ?object
240
+ FILTER(isIRI(?object))
241
+ }}"""
242
+ return query.format(subject_predicate=self.subject_predicate)
243
+ else:
244
+ query = """SELECT ?instance ?object
245
+ WHERE {{
246
+ ?instance a <{subject_type}> .
247
+ ?instance <{subject_predicate}> ?object
248
+ FILTER(isIRI(?object))
249
+ }}"""
250
+ return query.format(subject_type=self.subject_type, subject_predicate=self.subject_predicate)
251
+
252
+ def _skip_count_query(self) -> str:
253
+ if self.subject_type is None:
254
+ query = """SELECT (COUNT(?object) AS ?objectCount)
255
+ WHERE {{
256
+ ?instance <{subject_predicate}> ?object
257
+ FILTER(isLiteral(?object))
258
+ }}"""
259
+ return query.format(subject_predicate=self.subject_predicate)
260
+ else:
261
+ query = """SELECT (COUNT(?object) AS ?objectCount)
262
+ WHERE {{
263
+ ?instance a <{subject_type}> .
264
+ ?instance <{subject_predicate}> ?object
265
+ FILTER(isLiteral(?object))
266
+ }}"""
267
+ return query.format(subject_type=self.subject_type, subject_predicate=self.subject_predicate)
268
+
269
+ def _count_query(self) -> str:
270
+ if self.subject_type is None:
271
+ query = """SELECT (COUNT(?object) AS ?objectCount)
272
+ WHERE {{
273
+ ?instance <{subject_predicate}> ?object
274
+ FILTER(isIRI(?object))
275
+ }}"""
276
+ return query.format(subject_predicate=self.subject_predicate)
277
+ else:
278
+ query = """SELECT (COUNT(?object) AS ?objectCount)
279
+ WHERE {{
280
+ ?instance a <{subject_type}> .
281
+ ?instance <{subject_predicate}> ?object
282
+ FILTER(isIRI(?object))
283
+ }}"""
284
+
285
+ return query.format(subject_type=self.subject_type, subject_predicate=self.subject_predicate)
286
+
287
+ def operation(self, query_result_row: ResultRow) -> RowTransformationOutput:
288
+ row_output = RowTransformationOutput()
289
+
290
+ instance, object_entity = cast(tuple[URIRef, URIRef], query_result_row)
291
+ value = remove_namespace_from_uri(object_entity)
292
+
293
+ row_output.add_triples.append((instance, self.subject_predicate, rdflib.Literal(value)))
294
+ row_output.remove_triples.append((instance, self.subject_predicate, object_entity))
295
+ row_output.instances_modified_count += 1
296
+
297
+ return row_output
@@ -3,7 +3,6 @@ as some helper classes to handle them like NeatIssueList"""
3
3
 
4
4
  from ._base import (
5
5
  DefaultWarning,
6
- FutureResult,
7
6
  IssueList,
8
7
  MultiValueError,
9
8
  NeatError,
@@ -16,7 +15,6 @@ from ._base import (
16
15
 
17
16
  __all__ = [
18
17
  "DefaultWarning",
19
- "FutureResult",
20
18
  "IssueList",
21
19
  "MultiValueError",
22
20
  "NeatError",
@@ -425,6 +425,10 @@ class NeatIssueList(list, Sequence[T_NeatIssue], ABC):
425
425
  """Return True if this list contains any errors of the given type."""
426
426
  return any(isinstance(issue, error_type) for issue in self)
427
427
 
428
+ def has_warning_type(self, warning_type: type[NeatWarning]) -> bool:
429
+ """Return True if this list contains any warnings of the given type."""
430
+ return any(isinstance(issue, warning_type) for issue in self)
431
+
428
432
  def as_errors(self, operation: str = "Operation failed") -> ExceptionGroup:
429
433
  """Return an ExceptionGroup with all the errors in this list."""
430
434
  return ExceptionGroup(
@@ -505,55 +509,35 @@ def _get_subclasses(cls_: type[T_Cls], include_base: bool = False) -> Iterable[t
505
509
 
506
510
 
507
511
  @contextmanager
508
- def catch_warnings(
509
- issues: IssueList | None = None,
510
- warning_cls: type[NeatWarning] = DefaultWarning,
511
- ) -> Iterator[None]:
512
+ def catch_warnings() -> Iterator[IssueList]:
512
513
  """Catch warnings and append them to the issues list."""
514
+ issues = IssueList()
513
515
  with warnings.catch_warnings(record=True) as warning_logger:
514
516
  warnings.simplefilter("always")
515
517
  try:
516
- yield None
518
+ yield issues
517
519
  finally:
518
- if warning_logger and issues is not None:
519
- issues.extend([warning_cls.from_warning(warning) for warning in warning_logger]) # type: ignore[misc]
520
-
521
-
522
- class FutureResult:
523
- def __init__(self) -> None:
524
- self._result: Literal["success", "failure", "pending"] = "pending"
525
-
526
- @property
527
- def result(self) -> Literal["success", "failure", "pending"]:
528
- return self._result
520
+ if warning_logger:
521
+ issues.extend([NeatWarning.from_warning(warning) for warning in warning_logger]) # type: ignore[misc]
529
522
 
530
523
 
531
524
  @contextmanager
532
- def catch_issues(
533
- issues: IssueList,
534
- error_cls: type[NeatError] = NeatError,
535
- warning_cls: type[NeatWarning] = NeatWarning,
536
- error_args: dict[str, Any] | None = None,
537
- ) -> Iterator[FutureResult]:
525
+ def catch_issues(error_args: dict[str, Any] | None = None) -> Iterator[IssueList]:
538
526
  """This is an internal help function to handle issues and warnings.
539
527
 
540
528
  Args:
541
- issues: The issues list to append to.
542
- error_cls: The class used to convert errors to issues.
543
- warning_cls: The class used to convert warnings to issues.
529
+ error_args: Additional arguments to pass to the error class. The only use case as of (2025-01-03) is to pass
530
+ the read_info_by_sheet to the error class such that the row numbers can be adjusted to match the source
531
+ spreadsheet.
544
532
 
545
533
  Returns:
546
- FutureResult: A future result object that can be used to check the result of the context manager.
534
+ IssueList: The list of issues.
535
+
547
536
  """
548
- with catch_warnings(issues, warning_cls):
549
- future_result = FutureResult()
537
+ with catch_warnings() as issues:
550
538
  try:
551
- yield future_result
539
+ yield issues
552
540
  except ValidationError as e:
553
- issues.extend(error_cls.from_errors(e.errors(), **(error_args or {}))) # type: ignore[arg-type]
554
- future_result._result = "failure"
541
+ issues.extend(NeatError.from_errors(e.errors(), **(error_args or {}))) # type: ignore[arg-type]
555
542
  except (NeatError, MultiValueError) as e:
556
- issues.extend(error_cls.from_errors([e], **(error_args or {}))) # type: ignore[arg-type, list-item]
557
- future_result._result = "failure"
558
- else:
559
- future_result._result = "success"
543
+ issues.extend(NeatError.from_errors([e], **(error_args or {}))) # type: ignore[arg-type, list-item]
@@ -13,7 +13,7 @@ from ._external import (
13
13
  FileReadWarning,
14
14
  FileTypeUnexpectedWarning,
15
15
  )
16
- from ._general import NeatValueWarning, NotSupportedWarning, RegexViolationWarning
16
+ from ._general import MissingCogniteClientWarning, NeatValueWarning, NotSupportedWarning, RegexViolationWarning
17
17
  from ._models import (
18
18
  BreakingModelingPrincipleWarning,
19
19
  CDFNotSupportedWarning,
@@ -41,6 +41,7 @@ from ._resources import (
41
41
  ResourceRetrievalWarning,
42
42
  ResourcesDuplicatedWarning,
43
43
  ResourceTypeNotSupportedWarning,
44
+ ResourceUnknownWarning,
44
45
  )
45
46
 
46
47
  __all__ = [
@@ -53,6 +54,7 @@ __all__ = [
53
54
  "FileMissingRequiredFieldWarning",
54
55
  "FileReadWarning",
55
56
  "FileTypeUnexpectedWarning",
57
+ "MissingCogniteClientWarning",
56
58
  "NeatValueWarning",
57
59
  "NotSupportedHasDataFilterLimitWarning",
58
60
  "NotSupportedViewContainerLimitWarning",
@@ -73,6 +75,7 @@ __all__ = [
73
75
  "ResourceRegexViolationWarning",
74
76
  "ResourceRetrievalWarning",
75
77
  "ResourceTypeNotSupportedWarning",
78
+ "ResourceUnknownWarning",
76
79
  "ResourcesDuplicatedWarning",
77
80
  "UndefinedViewWarning",
78
81
  "UserModelingWarning",
@@ -27,3 +27,10 @@ class RegexViolationWarning(NeatWarning):
27
27
  identifier: str
28
28
  pattern_name: str
29
29
  motivation: str | None = None
30
+
31
+
32
+ @dataclass(unsafe_hash=True)
33
+ class MissingCogniteClientWarning(NeatWarning):
34
+ """Missing Cognite Client required for {functionality}"""
35
+
36
+ functionality: str
@@ -39,6 +39,17 @@ class ResourceNotFoundWarning(ResourceNeatWarning, Generic[T_Identifier, T_Refer
39
39
  referred_type: str
40
40
 
41
41
 
42
+ @dataclass(unsafe_hash=True)
43
+ class ResourceUnknownWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
44
+ """The {resource_type} with identifier {identifier} referred by {referred_type} {referred_by} is unknown.
45
+ Will continue, but the model is incomplete."""
46
+
47
+ referred_by: T_ReferenceIdentifier
48
+ referred_type: str
49
+
50
+ fix = "You can maybe retrieve the resource from the CDF."
51
+
52
+
42
53
  @dataclass(unsafe_hash=True)
43
54
  class ResourceNotDefinedWarning(ResourceNeatWarning, Generic[T_Identifier, T_ReferenceIdentifier]):
44
55
  """The {resource_type} {identifier} is not defined in the {location}"""
@@ -4,6 +4,7 @@ from dataclasses import dataclass, field
4
4
  from pathlib import Path
5
5
  from typing import Generic, Literal
6
6
 
7
+ from cognite.client import data_modeling as dm
7
8
  from cognite.client.data_classes._base import (
8
9
  T_CogniteResourceList,
9
10
  T_WritableCogniteResource,
@@ -19,7 +20,7 @@ from cognite.client.exceptions import CogniteAPIError
19
20
 
20
21
  from cognite.neat._client import DataModelingLoader, NeatClient
21
22
  from cognite.neat._client._api.data_modeling_loaders import MultiCogniteAPIError, T_WritableCogniteResourceList
22
- from cognite.neat._client.data_classes.data_modeling import Component
23
+ from cognite.neat._client.data_classes.data_modeling import Component, ViewApplyDict
23
24
  from cognite.neat._client.data_classes.schema import DMSSchema
24
25
  from cognite.neat._issues import IssueList
25
26
  from cognite.neat._issues.warnings import (
@@ -199,6 +200,10 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
199
200
  ) -> Iterable[UploadResult]:
200
201
  schema = self.export(rules)
201
202
 
203
+ # The CDF UI does not deal well with a child view overwriting a parent property with the same name
204
+ # This is a workaround to remove the duplicated properties
205
+ self._remove_duplicated_properties(schema.views, client)
206
+
202
207
  categorized_items_by_loader = self._categorize_by_loader(client, schema)
203
208
 
204
209
  is_failing = self.existing == "fail" and any(
@@ -376,3 +381,32 @@ class DMSExporter(CDFExporter[DMSRules, DMSSchema]):
376
381
  for data_model in data_models
377
382
  if (data_model.space, data_model.external_id) != (space, external_id)
378
383
  ]
384
+
385
+ @staticmethod
386
+ def _remove_duplicated_properties(views: ViewApplyDict, client: NeatClient) -> None:
387
+ parent_view_ids = {parent for view in views.values() for parent in view.implements}
388
+ parent_view_list = client.data_modeling.views.retrieve(
389
+ list(parent_view_ids), include_inherited_properties=False
390
+ )
391
+ parent_view_by_id = {view.as_id(): view.as_write() for view in parent_view_list}
392
+ for view in views.values():
393
+ if view.implements is None:
394
+ continue
395
+ for parent_id in view.implements:
396
+ if not (parent_view := parent_view_by_id.get(parent_id)):
397
+ continue
398
+ for shared_prop_id in set(view.properties or {}) & set(parent_view.properties or {}):
399
+ if view.properties is None or parent_view.properties is None:
400
+ continue
401
+ prop = view.properties[shared_prop_id]
402
+ parent_prop = parent_view.properties[shared_prop_id]
403
+ if (
404
+ isinstance(prop, dm.MappedPropertyApply)
405
+ and isinstance(parent_prop, dm.MappedPropertyApply)
406
+ and (
407
+ prop.container_property_identifier == parent_prop.container_property_identifier
408
+ and prop.container == parent_prop.container
409
+ and prop.source == parent_prop.source
410
+ )
411
+ ):
412
+ view.properties.pop(shared_prop_id)
@@ -229,8 +229,8 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
229
229
  if isinstance(selected_column, MergedCell):
230
230
  selected_column = column_cells[1]
231
231
 
232
- current = sheet.column_dimensions[selected_column.column_letter].width or (max_length + 0.5)
233
- sheet.column_dimensions[selected_column.column_letter].width = min(
232
+ current = sheet.column_dimensions[selected_column.column_letter].width or (max_length + 0.5) # type: ignore[union-attr]
233
+ sheet.column_dimensions[selected_column.column_letter].width = min( # type: ignore[union-attr]
234
234
  max(current, max_length + 0.5), MAX_COLUMN_WIDTH
235
235
  )
236
236
  return None