cognite-neat 0.96.6__py3-none-any.whl → 0.97.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. Click here for more details.

Files changed (68) hide show
  1. cognite/neat/_constants.py +3 -1
  2. cognite/neat/_graph/extractors/__init__.py +3 -0
  3. cognite/neat/_graph/extractors/_base.py +1 -1
  4. cognite/neat/_graph/extractors/_classic_cdf/_assets.py +1 -1
  5. cognite/neat/_graph/extractors/_classic_cdf/_base.py +1 -1
  6. cognite/neat/_graph/extractors/_classic_cdf/_classic.py +1 -1
  7. cognite/neat/_graph/extractors/_classic_cdf/_data_sets.py +1 -1
  8. cognite/neat/_graph/extractors/_classic_cdf/_events.py +1 -1
  9. cognite/neat/_graph/extractors/_classic_cdf/_files.py +1 -1
  10. cognite/neat/_graph/extractors/_classic_cdf/_labels.py +1 -1
  11. cognite/neat/_graph/extractors/_classic_cdf/_relationships.py +1 -1
  12. cognite/neat/_graph/extractors/_classic_cdf/_sequences.py +1 -1
  13. cognite/neat/_graph/extractors/_classic_cdf/_timeseries.py +1 -1
  14. cognite/neat/_graph/extractors/_dexpi.py +1 -1
  15. cognite/neat/_graph/extractors/_dms.py +1 -1
  16. cognite/neat/_graph/extractors/_iodd.py +1 -1
  17. cognite/neat/_graph/extractors/_mock_graph_generator.py +1 -1
  18. cognite/neat/_graph/extractors/_rdf_file.py +1 -1
  19. cognite/neat/_graph/loaders/_rdf2dms.py +1 -1
  20. cognite/neat/_graph/queries/_base.py +1 -1
  21. cognite/neat/_graph/transformers/__init__.py +3 -1
  22. cognite/neat/_graph/transformers/_rdfpath.py +60 -1
  23. cognite/neat/_issues/errors/__init__.py +2 -0
  24. cognite/neat/_issues/errors/_properties.py +12 -0
  25. cognite/neat/_issues/warnings/__init__.py +2 -0
  26. cognite/neat/_issues/warnings/_models.py +11 -0
  27. cognite/neat/_rules/importers/__init__.py +11 -0
  28. cognite/neat/_rules/importers/_base.py +7 -0
  29. cognite/neat/_rules/importers/_dms2rules.py +12 -3
  30. cognite/neat/_rules/importers/_rdf/_inference2rules.py +17 -2
  31. cognite/neat/_rules/models/asset/_rules.py +6 -2
  32. cognite/neat/_rules/models/asset/_rules_input.py +6 -1
  33. cognite/neat/_rules/models/data_types.py +6 -0
  34. cognite/neat/_rules/models/dms/_rules.py +8 -1
  35. cognite/neat/_rules/models/dms/_rules_input.py +8 -0
  36. cognite/neat/_rules/models/dms/_validation.py +64 -2
  37. cognite/neat/_rules/models/domain.py +10 -0
  38. cognite/neat/_rules/models/entities/_loaders.py +3 -5
  39. cognite/neat/_rules/models/information/_rules.py +6 -2
  40. cognite/neat/_rules/models/information/_rules_input.py +6 -1
  41. cognite/neat/_rules/transformers/_base.py +7 -0
  42. cognite/neat/_rules/transformers/_converters.py +56 -4
  43. cognite/neat/_session/_base.py +94 -23
  44. cognite/neat/_session/_inspect.py +12 -4
  45. cognite/neat/_session/_prepare.py +144 -21
  46. cognite/neat/_session/_read.py +137 -30
  47. cognite/neat/_session/_set.py +22 -3
  48. cognite/neat/_session/_show.py +171 -45
  49. cognite/neat/_session/_state.py +79 -30
  50. cognite/neat/_session/_to.py +16 -17
  51. cognite/neat/_session/engine/__init__.py +4 -0
  52. cognite/neat/_session/engine/_import.py +7 -0
  53. cognite/neat/_session/engine/_interface.py +24 -0
  54. cognite/neat/_session/engine/_load.py +129 -0
  55. cognite/neat/_session/exceptions.py +13 -3
  56. cognite/neat/_shared.py +6 -1
  57. cognite/neat/_store/_base.py +3 -24
  58. cognite/neat/_store/_provenance.py +185 -42
  59. cognite/neat/_utils/rdf_.py +34 -1
  60. cognite/neat/_utils/reader/__init__.py +3 -0
  61. cognite/neat/_utils/reader/_base.py +162 -0
  62. cognite/neat/_version.py +2 -1
  63. {cognite_neat-0.96.6.dist-info → cognite_neat-0.97.0.dist-info}/METADATA +5 -3
  64. {cognite_neat-0.96.6.dist-info → cognite_neat-0.97.0.dist-info}/RECORD +67 -62
  65. cognite/neat/_graph/models.py +0 -7
  66. {cognite_neat-0.96.6.dist-info → cognite_neat-0.97.0.dist-info}/LICENSE +0 -0
  67. {cognite_neat-0.96.6.dist-info → cognite_neat-0.97.0.dist-info}/WHEEL +0 -0
  68. {cognite_neat-0.96.6.dist-info → cognite_neat-0.97.0.dist-info}/entry_points.txt +0 -0
@@ -1,12 +1,15 @@
1
1
  from collections.abc import Collection
2
- from typing import Literal, cast
2
+ from datetime import datetime, timezone
3
+ from typing import Literal
3
4
 
4
5
  from cognite.client.data_classes.data_modeling import DataModelIdentifier
6
+ from rdflib import URIRef
5
7
 
6
- from cognite.neat._issues._base import IssueList
8
+ from cognite.neat._graph.transformers._rdfpath import MakeConnectionOnExactMatch
7
9
  from cognite.neat._rules._shared import ReadRules
8
10
  from cognite.neat._rules.models.information._rules_input import InformationInputRules
9
11
  from cognite.neat._rules.transformers import ReduceCogniteModel, ToCompliantEntities, ToExtension
12
+ from cognite.neat._store._provenance import Change
10
13
 
11
14
  from ._state import SessionState
12
15
  from .exceptions import intercept_session_exceptions
@@ -18,6 +21,52 @@ class PrepareAPI:
18
21
  self._state = state
19
22
  self._verbose = verbose
20
23
  self.data_model = DataModelPrepareAPI(state, verbose)
24
+ self.instances = InstancePrepareAPI(state, verbose)
25
+
26
+
27
+ @intercept_session_exceptions
28
+ class InstancePrepareAPI:
29
+ def __init__(self, state: SessionState, verbose: bool) -> None:
30
+ self._state = state
31
+ self._verbose = verbose
32
+
33
+ def make_connection_on_exact_match(
34
+ self,
35
+ source: tuple[URIRef, URIRef],
36
+ target: tuple[URIRef, URIRef],
37
+ connection: URIRef | None = None,
38
+ limit: int | None = 100,
39
+ ) -> None:
40
+ """Make connection on exact match.
41
+
42
+ Args:
43
+ source: The source of the connection. A tuple of (rdf type, property)
44
+ where property is the property that should be matched on the source
45
+ to make the connection with the target.
46
+ target: The target of the connection. A tuple of (rdf type, property)
47
+ where property is the property that should be matched on the target
48
+ to make the connection with the source.
49
+
50
+ connection: new property to use for the connection. If None, the connection
51
+ will be made by lowercasing the target type.
52
+ limit: The maximum number of connections to make. If None, all connections are made.
53
+
54
+
55
+ """
56
+
57
+ subject_type, subject_predicate = source
58
+ object_type, object_predicate = target
59
+
60
+ transformer = MakeConnectionOnExactMatch(
61
+ subject_type,
62
+ subject_predicate,
63
+ object_type,
64
+ object_predicate,
65
+ connection,
66
+ limit,
67
+ )
68
+
69
+ self._state.instances.store.transform(transformer)
21
70
 
22
71
 
23
72
  @intercept_session_exceptions
@@ -28,21 +77,32 @@ class DataModelPrepareAPI:
28
77
 
29
78
  def cdf_compliant_external_ids(self) -> None:
30
79
  """Convert data model component external ids to CDF compliant entities."""
31
- if input := self._state.information_input_rule:
32
- output = ToCompliantEntities().transform(input)
33
- self._state.input_rules.append(
34
- ReadRules(
35
- rules=cast(InformationInputRules, output.get_rules()),
36
- issues=IssueList(),
37
- read_context={},
38
- )
80
+ if input := self._state.data_model.last_info_unverified_rule:
81
+ source_id, rules = input
82
+
83
+ start = datetime.now(timezone.utc)
84
+ transformer = ToCompliantEntities()
85
+ output: ReadRules[InformationInputRules] = transformer.transform(rules)
86
+ end = datetime.now(timezone.utc)
87
+
88
+ change = Change.from_rules_activity(
89
+ output,
90
+ transformer.agent,
91
+ start,
92
+ end,
93
+ "Converted external ids to CDF compliant entities",
94
+ self._state.data_model.provenance.source_entity(source_id)
95
+ or self._state.data_model.provenance.target_entity(source_id),
39
96
  )
40
97
 
98
+ self._state.data_model.write(output, change)
99
+
41
100
  def to_enterprise(
42
101
  self,
43
102
  data_model_id: DataModelIdentifier,
44
103
  org_name: str = "My",
45
104
  dummy_property: str = "GUID",
105
+ move_connections: bool = False,
46
106
  ) -> None:
47
107
  """Uses the current data model as a basis to create enterprise data model
48
108
 
@@ -50,6 +110,7 @@ class DataModelPrepareAPI:
50
110
  data_model_id: The enterprise data model id that is being created
51
111
  org_name: Organization name to use for the views in the enterprise data model.
52
112
  dummy_property: The dummy property to use as placeholder for the views in the new data model.
113
+ move_connections: If True, the connections will be moved to the new data model.
53
114
 
54
115
  !!! note "Enterprise Data Model Creation"
55
116
  Always create an enterprise data model from a Cognite Data Model as this will
@@ -58,15 +119,39 @@ class DataModelPrepareAPI:
58
119
  - Atlas AI
59
120
  - ...
60
121
 
122
+ !!! note "Move Connections"
123
+ If you want to move the connections to the new data model, set the move_connections
124
+ to True. This will move the connections to the new data model and use new model
125
+ views as the source and target views.
126
+
61
127
  """
62
- if dms := self._state.last_verified_dms_rules:
63
- output = ToExtension(
128
+ if input := self._state.data_model.last_verified_dms_rules:
129
+ source_id, rules = input
130
+
131
+ start = datetime.now(timezone.utc)
132
+ transformer = ToExtension(
64
133
  new_model_id=data_model_id,
65
134
  org_name=org_name,
66
135
  type_="enterprise",
67
136
  dummy_property=dummy_property,
68
- ).transform(dms)
69
- self._state.verified_rules.append(output.rules)
137
+ move_connections=move_connections,
138
+ )
139
+ output = transformer.transform(rules)
140
+ end = datetime.now(timezone.utc)
141
+
142
+ change = Change.from_rules_activity(
143
+ output,
144
+ transformer.agent,
145
+ start,
146
+ end,
147
+ (
148
+ f"Prepared data model {data_model_id} to be enterprise data "
149
+ f"model on top of {rules.metadata.as_data_model_id()}"
150
+ ),
151
+ self._state.data_model.provenance.source_entity(source_id),
152
+ )
153
+
154
+ self._state.data_model.write(output.rules, change)
70
155
 
71
156
  def to_solution(
72
157
  self,
@@ -94,15 +179,33 @@ class DataModelPrepareAPI:
94
179
  the containers in the solution data model space.
95
180
 
96
181
  """
97
- if dms := self._state.last_verified_dms_rules:
98
- output = ToExtension(
182
+ if input := self._state.data_model.last_verified_dms_rules:
183
+ source_id, rules = input
184
+
185
+ start = datetime.now(timezone.utc)
186
+ transformer = ToExtension(
99
187
  new_model_id=data_model_id,
100
188
  org_name=org_name,
101
189
  type_="solution",
102
190
  mode=mode,
103
191
  dummy_property=dummy_property,
104
- ).transform(dms)
105
- self._state.verified_rules.append(output.rules)
192
+ )
193
+ output = transformer.transform(rules)
194
+ end = datetime.now(timezone.utc)
195
+
196
+ change = Change.from_rules_activity(
197
+ output,
198
+ transformer.agent,
199
+ start,
200
+ end,
201
+ (
202
+ f"Prepared data model {data_model_id} to be solution data model "
203
+ f"on top of {rules.metadata.as_data_model_id()}"
204
+ ),
205
+ self._state.data_model.provenance.source_entity(source_id),
206
+ )
207
+
208
+ self._state.data_model.write(output.rules, change)
106
209
 
107
210
  def reduce(self, drop: Collection[Literal["3D", "Annotation", "BaseViews"] | str]) -> None:
108
211
  """This is a special method that allow you to drop parts of the data model.
@@ -113,6 +216,26 @@ class DataModelPrepareAPI:
113
216
  drops multiple views at once. You can also pass externalIds of views to drop individual views.
114
217
 
115
218
  """
116
- if dms := self._state.last_verified_dms_rules:
117
- output = ReduceCogniteModel(drop).transform(dms)
118
- self._state.verified_rules.append(output.rules)
219
+ if input := self._state.data_model.last_verified_dms_rules:
220
+ source_id, rules = input
221
+ start = datetime.now(timezone.utc)
222
+
223
+ transformer = ReduceCogniteModel(drop)
224
+ output = transformer.transform(rules)
225
+ output.rules.metadata.version = f"{rules.metadata.version}.reduced"
226
+
227
+ end = datetime.now(timezone.utc)
228
+
229
+ change = Change.from_rules_activity(
230
+ output,
231
+ transformer.agent,
232
+ start,
233
+ end,
234
+ (
235
+ f"Reduced data model {rules.metadata.as_data_model_id()}"
236
+ f"on top of {rules.metadata.as_data_model_id()}"
237
+ ),
238
+ self._state.data_model.provenance.source_entity(source_id),
239
+ )
240
+
241
+ self._state.data_model.write(output.rules, change)
@@ -1,8 +1,10 @@
1
+ import tempfile
2
+ from datetime import datetime, timezone
1
3
  from pathlib import Path
2
4
  from typing import Any
3
5
 
4
6
  from cognite.client import CogniteClient
5
- from cognite.client.data_classes.data_modeling import DataModelIdentifier
7
+ from cognite.client.data_classes.data_modeling import DataModelId, DataModelIdentifier
6
8
 
7
9
  from cognite.neat._graph import examples as instances_examples
8
10
  from cognite.neat._graph import extractors
@@ -10,10 +12,15 @@ from cognite.neat._issues import IssueList
10
12
  from cognite.neat._issues.errors import NeatValueError
11
13
  from cognite.neat._rules import importers
12
14
  from cognite.neat._rules._shared import ReadRules
15
+ from cognite.neat._store._provenance import Activity as ProvenanceActivity
16
+ from cognite.neat._store._provenance import Change
17
+ from cognite.neat._store._provenance import Entity as ProvenanceEntity
18
+ from cognite.neat._utils.reader import GitHubReader, NeatReader, PathReader
13
19
 
14
20
  from ._state import SessionState
15
21
  from ._wizard import NeatObjectType, RDFFileType, object_wizard, rdf_dm_wizard
16
- from .exceptions import intercept_session_exceptions
22
+ from .engine import import_engine
23
+ from .exceptions import NeatSessionError, intercept_session_exceptions
17
24
 
18
25
 
19
26
  @intercept_session_exceptions
@@ -24,6 +31,7 @@ class ReadAPI:
24
31
  self.cdf = CDFReadAPI(state, client, verbose)
25
32
  self.rdf = RDFReadAPI(state, client, verbose)
26
33
  self.excel = ExcelReadAPI(state, client, verbose)
34
+ self.csv = CSVReadAPI(state, client, verbose)
27
35
 
28
36
 
29
37
  @intercept_session_exceptions
@@ -33,14 +41,17 @@ class BaseReadAPI:
33
41
  self._verbose = verbose
34
42
  self._client = client
35
43
 
36
- def _store_rules(self, io: Any, input_rules: ReadRules, source: str) -> None:
37
- if input_rules.rules:
38
- self._state.input_rules.append(input_rules)
39
- if self._verbose:
40
- if input_rules.issues.has_errors:
41
- print(f"{source} {type(io)} {io} read failed")
42
- else:
43
- print(f"{source} {type(io)} {io} read successfully")
44
+ def _store_rules(self, rules: ReadRules, change: Change) -> IssueList:
45
+ if self._verbose:
46
+ if rules.issues.has_errors:
47
+ print("Data model read failed")
48
+ else:
49
+ print("Data model read passed")
50
+
51
+ if rules.rules:
52
+ self._state.data_model.write(rules, change)
53
+
54
+ return rules.issues
44
55
 
45
56
  def _return_filepath(self, io: Any) -> Path:
46
57
  if isinstance(io, str):
@@ -64,10 +75,36 @@ class CDFReadAPI(BaseReadAPI):
64
75
  return self._client
65
76
 
66
77
  def data_model(self, data_model_id: DataModelIdentifier) -> IssueList:
78
+ data_model_id = DataModelId.load(data_model_id)
79
+
80
+ if not data_model_id.version:
81
+ raise NeatSessionError("Data model version is required to read a data model.")
82
+
83
+ # actual reading of data model
84
+ start = datetime.now(timezone.utc)
67
85
  importer = importers.DMSImporter.from_data_model_id(self._get_client, data_model_id)
68
- input_rules = importer.to_rules()
69
- self._store_rules(data_model_id, input_rules, "CDF")
70
- return input_rules.issues
86
+ rules = importer.to_rules()
87
+ end = datetime.now(timezone.utc)
88
+
89
+ # provenance information
90
+ source_entity = ProvenanceEntity.from_data_model_id(data_model_id)
91
+ agent = importer.agent
92
+ activity = ProvenanceActivity(
93
+ was_associated_with=agent,
94
+ ended_at_time=end,
95
+ used=source_entity,
96
+ started_at_time=start,
97
+ )
98
+ target_entity = ProvenanceEntity.from_rules(rules, agent, activity)
99
+ change = Change(
100
+ source_entity=source_entity,
101
+ agent=agent,
102
+ activity=activity,
103
+ target_entity=target_entity,
104
+ description=f"DMS Data model {data_model_id.as_tuple()} read as unverified data model",
105
+ )
106
+
107
+ return self._store_rules(rules, change)
71
108
 
72
109
 
73
110
  @intercept_session_exceptions
@@ -80,7 +117,7 @@ class CDFClassicAPI(BaseReadAPI):
80
117
 
81
118
  def assets(self, root_asset_external_id: str) -> None:
82
119
  extractor = extractors.AssetsExtractor.from_hierarchy(self._get_client, root_asset_external_id)
83
- self._state.store.write(extractor)
120
+ self._state.instances.store.write(extractor)
84
121
  if self._verbose:
85
122
  print(f"Asset hierarchy {root_asset_external_id} read successfully")
86
123
 
@@ -88,28 +125,94 @@ class CDFClassicAPI(BaseReadAPI):
88
125
  @intercept_session_exceptions
89
126
  class ExcelReadAPI(BaseReadAPI):
90
127
  def __call__(self, io: Any) -> IssueList:
91
- filepath = self._return_filepath(io)
92
- input_rules: ReadRules = importers.ExcelImporter(filepath).to_rules()
93
- self._store_rules(io, input_rules, "Excel")
128
+ reader = NeatReader.create(io)
129
+ start = datetime.now(timezone.utc)
130
+ if not isinstance(reader, PathReader):
131
+ raise NeatValueError("Only file paths are supported for Excel files")
132
+ importer: importers.ExcelImporter = importers.ExcelImporter(reader.path)
133
+ input_rules: ReadRules = importer.to_rules()
134
+ end = datetime.now(timezone.utc)
135
+
136
+ if input_rules.rules:
137
+ change = Change.from_rules_activity(
138
+ input_rules,
139
+ importer.agent,
140
+ start,
141
+ end,
142
+ description=f"Excel file {reader!s} read as unverified data model",
143
+ )
144
+ self._store_rules(input_rules, change)
145
+
94
146
  return input_rules.issues
95
147
 
96
148
 
149
+ @intercept_session_exceptions
150
+ class CSVReadAPI(BaseReadAPI):
151
+ def __call__(self, io: Any, type: str, primary_key: str) -> None:
152
+ reader = NeatReader.create(io)
153
+ if isinstance(reader, GitHubReader):
154
+ path = Path(tempfile.gettempdir()).resolve() / reader.name
155
+ path.write_text(reader.read_text())
156
+ elif isinstance(reader, PathReader):
157
+ path = reader.path
158
+ else:
159
+ raise NeatValueError("Only file paths are supported for CSV files")
160
+ engine = import_engine()
161
+ engine.set.source = ".csv"
162
+ engine.set.file = path
163
+ engine.set.type = type
164
+ engine.set.primary_key = primary_key
165
+ extractor = engine.create_extractor()
166
+
167
+ self._state.instances.store.write(extractor)
168
+
169
+
97
170
  @intercept_session_exceptions
98
171
  class RDFReadAPI(BaseReadAPI):
99
172
  def __init__(self, state: SessionState, client: CogniteClient | None, verbose: bool) -> None:
100
173
  super().__init__(state, client, verbose)
101
174
  self.examples = RDFExamples(state)
102
175
 
103
- def _ontology(self, io: Any) -> IssueList:
104
- filepath = self._return_filepath(io)
105
- input_rules: ReadRules = importers.OWLImporter.from_file(filepath).to_rules()
106
- self._store_rules(io, input_rules, "Ontology")
176
+ def ontology(self, io: Any) -> IssueList:
177
+ start = datetime.now(timezone.utc)
178
+ reader = NeatReader.create(io)
179
+ if not isinstance(reader, PathReader):
180
+ raise NeatValueError("Only file paths are supported for RDF files")
181
+ importer = importers.OWLImporter.from_file(reader.path)
182
+ input_rules: ReadRules = importer.to_rules()
183
+ end = datetime.now(timezone.utc)
184
+
185
+ if input_rules.rules:
186
+ change = Change.from_rules_activity(
187
+ input_rules,
188
+ importer.agent,
189
+ start,
190
+ end,
191
+ description=f"Ontology file {reader!s} read as unverified data model",
192
+ )
193
+ self._store_rules(input_rules, change)
194
+
107
195
  return input_rules.issues
108
196
 
109
- def _imf(self, io: Any) -> IssueList:
110
- filepath = self._return_filepath(io)
111
- input_rules: ReadRules = importers.IMFImporter.from_file(filepath).to_rules()
112
- self._store_rules(io, input_rules, "IMF Types")
197
+ def imf(self, io: Any) -> IssueList:
198
+ start = datetime.now(timezone.utc)
199
+ reader = NeatReader.create(io)
200
+ if not isinstance(reader, PathReader):
201
+ raise NeatValueError("Only file paths are supported for RDF files")
202
+ importer = importers.IMFImporter.from_file(reader.path)
203
+ input_rules: ReadRules = importer.to_rules()
204
+ end = datetime.now(timezone.utc)
205
+
206
+ if input_rules.rules:
207
+ change = Change.from_rules_activity(
208
+ input_rules,
209
+ importer.agent,
210
+ start,
211
+ end,
212
+ description=f"IMF Types file {reader!s} read as unverified data model",
213
+ )
214
+ self._store_rules(input_rules, change)
215
+
113
216
  return input_rules.issues
114
217
 
115
218
  def __call__(
@@ -124,17 +227,21 @@ class RDFReadAPI(BaseReadAPI):
124
227
  if type.lower() == "Data Model".lower():
125
228
  source = source or rdf_dm_wizard("What type of data model is the RDF?")
126
229
  if source == "Ontology":
127
- return self._ontology(io)
230
+ return self.ontology(io)
128
231
  elif source == "IMF":
129
- return self._imf(io)
232
+ return self.imf(io)
130
233
  else:
131
234
  raise ValueError(f"Expected ontology, imf or instances, got {source}")
132
235
 
133
236
  elif type.lower() == "Instances".lower():
134
- self._state.store.write(extractors.RdfFileExtractor(self._return_filepath(io)))
237
+ reader = NeatReader.create(io)
238
+ if not isinstance(reader, PathReader):
239
+ raise NeatValueError("Only file paths are supported for RDF files")
240
+
241
+ self._state.instances.store.write(extractors.RdfFileExtractor(reader.path))
135
242
  return IssueList()
136
243
  else:
137
- raise ValueError(f"Expected data model or instances, got {type}")
244
+ raise NeatSessionError(f"Expected data model or instances, got {type}")
138
245
 
139
246
 
140
247
  class RDFExamples:
@@ -143,5 +250,5 @@ class RDFExamples:
143
250
 
144
251
  @property
145
252
  def nordic44(self) -> IssueList:
146
- self._state.store.write(extractors.RdfFileExtractor(instances_examples.nordic44_knowledge_graph))
253
+ self._state.instances.store.write(extractors.RdfFileExtractor(instances_examples.nordic44_knowledge_graph))
147
254
  return IssueList()
@@ -1,6 +1,9 @@
1
+ from datetime import datetime, timezone
2
+
1
3
  from cognite.client import data_modeling as dm
2
4
 
3
5
  from cognite.neat._rules.transformers import SetIDDMSModel
6
+ from cognite.neat._store._provenance import Change
4
7
 
5
8
  from ._state import SessionState
6
9
  from .exceptions import intercept_session_exceptions
@@ -14,9 +17,25 @@ class SetAPI:
14
17
 
15
18
  def data_model_id(self, new_model_id: dm.DataModelId | tuple[str, str, str]) -> None:
16
19
  """Sets the data model ID of the latest verified data model."""
17
- if dms := self._state.last_verified_dms_rules:
18
- output = SetIDDMSModel(new_model_id).transform(dms)
19
- self._state.verified_rules.append(output.rules)
20
+ if res := self._state.data_model.last_verified_dms_rules:
21
+ source_id, rules = res
22
+
23
+ start = datetime.now(timezone.utc)
24
+ transformer = SetIDDMSModel(new_model_id)
25
+ output = transformer.transform(rules)
26
+ end = datetime.now(timezone.utc)
27
+
28
+ # Provenance
29
+ change = Change.from_rules_activity(
30
+ output,
31
+ transformer.agent,
32
+ start,
33
+ end,
34
+ "Changed data model id",
35
+ self._state.data_model.provenance.source_entity(source_id),
36
+ )
37
+
38
+ self._state.data_model.write(output, change)
20
39
  if self._verbose:
21
40
  print(f"Data model ID set to {new_model_id}")
22
41
  else: