cognite-neat 0.121.2__py3-none-any.whl → 0.122.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of cognite-neat might be problematic. See the package registry page for more details.

Files changed (51)
  1. cognite/neat/_version.py +1 -1
  2. cognite/neat/core/_constants.py +1 -1
  3. cognite/neat/core/_data_model/catalog/__init__.py +1 -1
  4. cognite/neat/core/_data_model/exporters/__init__.py +1 -1
  5. cognite/neat/core/_data_model/exporters/_data_model2instance_template.py +11 -11
  6. cognite/neat/core/_data_model/importers/__init__.py +2 -2
  7. cognite/neat/core/_data_model/importers/_dict2data_model.py +11 -6
  8. cognite/neat/core/_data_model/importers/_rdf/__init__.py +3 -3
  9. cognite/neat/core/_data_model/importers/_rdf/_base.py +8 -8
  10. cognite/neat/core/_data_model/importers/_rdf/{_imf2rules.py → _imf2data_model.py} +2 -2
  11. cognite/neat/core/_data_model/importers/_rdf/{_inference2rules.py → _inference2rdata_model.py} +26 -26
  12. cognite/neat/core/_data_model/importers/_rdf/{_owl2rules.py → _owl2data_model.py} +5 -5
  13. cognite/neat/core/_data_model/importers/_spreadsheet2data_model.py +6 -9
  14. cognite/neat/core/_data_model/models/__init__.py +5 -3
  15. cognite/neat/core/_data_model/models/_base_unverified.py +12 -12
  16. cognite/neat/core/_data_model/models/_base_verified.py +6 -11
  17. cognite/neat/core/_data_model/models/conceptual/_validation.py +1 -1
  18. cognite/neat/core/_data_model/models/conceptual/_verified.py +2 -2
  19. cognite/neat/core/_data_model/models/mapping/_classic2core.py +1 -1
  20. cognite/neat/core/_data_model/models/physical/_exporter.py +4 -3
  21. cognite/neat/core/_data_model/models/physical/_validation.py +5 -5
  22. cognite/neat/core/_data_model/transformers/__init__.py +2 -2
  23. cognite/neat/core/_data_model/transformers/_base.py +1 -1
  24. cognite/neat/core/_data_model/transformers/_converters.py +9 -9
  25. cognite/neat/core/_data_model/transformers/_verification.py +1 -1
  26. cognite/neat/core/_instances/extractors/_base.py +1 -1
  27. cognite/neat/core/_instances/extractors/_classic_cdf/_classic.py +1 -1
  28. cognite/neat/core/_instances/extractors/_mock_graph_generator.py +14 -13
  29. cognite/neat/core/_instances/loaders/_base.py +2 -2
  30. cognite/neat/core/_instances/loaders/_rdf2dms.py +19 -19
  31. cognite/neat/core/_instances/transformers/_rdfpath.py +1 -1
  32. cognite/neat/core/_issues/_factory.py +1 -1
  33. cognite/neat/core/_issues/errors/_resources.py +1 -1
  34. cognite/neat/core/_issues/warnings/_properties.py +1 -1
  35. cognite/neat/session/_base.py +26 -22
  36. cognite/neat/session/_drop.py +2 -2
  37. cognite/neat/session/_experimental.py +1 -1
  38. cognite/neat/session/_inspect.py +8 -8
  39. cognite/neat/session/_mapping.py +9 -5
  40. cognite/neat/session/_read.py +38 -36
  41. cognite/neat/session/_set.py +5 -5
  42. cognite/neat/session/_show.py +15 -12
  43. cognite/neat/session/_state/README.md +1 -1
  44. cognite/neat/session/_state.py +18 -18
  45. cognite/neat/session/_subset.py +6 -6
  46. cognite/neat/session/_template.py +13 -11
  47. cognite/neat/session/_to.py +35 -35
  48. {cognite_neat-0.121.2.dist-info → cognite_neat-0.122.0.dist-info}/METADATA +14 -7
  49. {cognite_neat-0.121.2.dist-info → cognite_neat-0.122.0.dist-info}/RECORD +51 -51
  50. {cognite_neat-0.121.2.dist-info → cognite_neat-0.122.0.dist-info}/WHEEL +0 -0
  51. {cognite_neat-0.121.2.dist-info → cognite_neat-0.122.0.dist-info}/licenses/LICENSE +0 -0
@@ -65,7 +65,7 @@ class PhysicalValidation:
65
65
  client: NeatClient | None = None,
66
66
  read_info_by_spreadsheet: dict[str, SpreadsheetRead] | None = None,
67
67
  ) -> None:
68
- self._rules = data_model
68
+ self._data_model = data_model
69
69
  self._client = client
70
70
  self._metadata = data_model.metadata
71
71
  self._properties = data_model.properties
@@ -109,7 +109,7 @@ class PhysicalValidation:
109
109
  )
110
110
  if (imported_views or imported_containers) and self._client is None:
111
111
  raise CDFMissingClientError(
112
- f"{self._rules.metadata.as_data_model_id()} has imported views and/or container: "
112
+ f"{self._data_model.metadata.as_data_model_id()} has imported views and/or container: "
113
113
  f"{imported_views}, {imported_containers}."
114
114
  )
115
115
  referenced_views = ViewList([])
@@ -131,10 +131,10 @@ class PhysicalValidation:
131
131
  raise CDFMissingResourcesError(containers=tuple(missing_containers), views=tuple(missing_views))
132
132
 
133
133
  # Setup data structures for validation
134
- dms_schema = self._rules.as_schema()
134
+ dms_schema = self._data_model.as_schema()
135
135
  ref_view_by_id = {view.as_id(): view for view in referenced_views}
136
136
  ref_container_by_id = {container.as_id(): container for container in referenced_containers}
137
- # All containers and views are the Containers/Views in the DMSRules + the referenced ones
137
+ # All containers and views are the Containers/Views in the Physical DM + the referenced ones
138
138
  all_containers_by_id: dict[dm.ContainerId, dm.ContainerApply | dm.Container] = {
139
139
  **dict(dms_schema.containers.items()),
140
140
  **ref_container_by_id,
@@ -175,7 +175,7 @@ class PhysicalValidation:
175
175
  def _same_space_views_and_data_model(self) -> IssueList:
176
176
  issue_list = IssueList()
177
177
 
178
- schema = self._rules.as_schema(remove_cdf_spaces=True)
178
+ schema = self._data_model.as_schema(remove_cdf_spaces=True)
179
179
 
180
180
  if schema.data_model and schema.views:
181
181
  data_model_space = schema.data_model.space
@@ -6,7 +6,7 @@ from ._converters import (
6
6
  ClassicPrepareCore,
7
7
  ConceptualToPhysical,
8
8
  ConversionTransformer,
9
- ConvertToRules,
9
+ ConvertToDataModel,
10
10
  DropModelViews,
11
11
  IncludeReferenced,
12
12
  MergeConceptualDataModels,
@@ -40,7 +40,7 @@ __all__ = [
40
40
  "ClassicPrepareCore",
41
41
  "ConceptualToPhysical",
42
42
  "ConversionTransformer",
43
- "ConvertToRules",
43
+ "ConvertToDataModel",
44
44
  "DataModelTransformer",
45
45
  "DropModelViews",
46
46
  "IncludeReferenced",
@@ -67,7 +67,7 @@ class DataModelTransformer(ABC, Generic[T_DataModelIn, T_DataModelOut]):
67
67
  return get_args(annotation)
68
68
 
69
69
  if get_origin(annotation) is ImportedDataModel and isinstance(get_args(annotation)[0], TypeVar):
70
- # Hardcoded for now, as we only have two types of ReadRules
70
+ # Hardcoded for now, as we only have two types of imported data models
71
71
  return (
72
72
  ImportedDataModel[UnverifiedPhysicalDataModel],
73
73
  ImportedDataModel[UnverifiedConceptualDataModel],
@@ -110,8 +110,8 @@ from ._base import (
110
110
  )
111
111
  from ._verification import VerifyPhysicalDataModel
112
112
 
113
- T_InputInRules = TypeVar("T_InputInRules", bound=ImportedUnverifiedDataModel)
114
- T_InputOutRules = TypeVar("T_InputOutRules", bound=ImportedUnverifiedDataModel)
113
+ T_UnverifiedInDataModel = TypeVar("T_UnverifiedInDataModel", bound=ImportedUnverifiedDataModel)
114
+ T_UnverifiedOutDataModel = TypeVar("T_UnverifiedOutDataModel", bound=ImportedUnverifiedDataModel)
115
115
 
116
116
 
117
117
  class ConversionTransformer(VerifiedDataModelTransformer[T_VerifiedIn, T_VerifiedOut], ABC):
@@ -225,7 +225,7 @@ class ToDMSCompliantEntities(
225
225
 
226
226
 
227
227
  class StandardizeSpaceAndVersion(VerifiedDataModelTransformer[PhysicalDataModel, PhysicalDataModel]): # type: ignore[misc]
228
- """This transformer standardizes the space and version of the DMSRules.
228
+ """This transformer standardizes the space and version of the physical data model.
229
229
 
230
230
  typically used to ensure all the views are moved to the same version as the data model.
231
231
 
@@ -381,7 +381,7 @@ class PrefixEntities(ConversionTransformer): # type: ignore[type-var]
381
381
  def transform(self, data_model: ConceptualDataModel | PhysicalDataModel) -> ConceptualDataModel | PhysicalDataModel:
382
382
  copy: ConceptualDataModel | PhysicalDataModel = data_model.model_copy(deep=True)
383
383
 
384
- # Case: Prefix Information Rules
384
+ # Case: Prefix Conceptual Data Model
385
385
  if isinstance(copy, ConceptualDataModel):
386
386
  # prefix classes
387
387
  for cls in copy.concepts:
@@ -408,7 +408,7 @@ class PrefixEntities(ConversionTransformer): # type: ignore[type-var]
408
408
  prop.value_type.types[i] = self._with_prefix(cast(ConceptEntity, value_type))
409
409
  return copy
410
410
 
411
- # Case: Prefix DMS Rules
411
+ # Case: Prefix Physical Data Model
412
412
  elif isinstance(copy, PhysicalDataModel):
413
413
  for view in copy.views:
414
414
  if view.view.space == copy.metadata.space:
@@ -600,10 +600,10 @@ class PhysicalToConceptual(ConversionTransformer[PhysicalDataModel, ConceptualDa
600
600
  self.instance_namespace = instance_namespace
601
601
 
602
602
  def transform(self, data_model: PhysicalDataModel) -> ConceptualDataModel:
603
- return _DMSRulesConverter(data_model, self.instance_namespace).as_conceptual_data_model()
603
+ return _PhysicalDataModelConverter(data_model, self.instance_namespace).as_conceptual_data_model()
604
604
 
605
605
 
606
- class ConvertToRules(ConversionTransformer[VerifiedDataModel, VerifiedDataModel]):
606
+ class ConvertToDataModel(ConversionTransformer[VerifiedDataModel, VerifiedDataModel]):
607
607
  """Converts any data_model to any data_model."""
608
608
 
609
609
  def __init__(self, out_cls: type[VerifiedDataModel]):
@@ -2004,7 +2004,7 @@ class _ConceptualDataModelConverter:
2004
2004
  return None
2005
2005
 
2006
2006
 
2007
- class _DMSRulesConverter:
2007
+ class _PhysicalDataModelConverter:
2008
2008
  def __init__(self, data_model: PhysicalDataModel, instance_namespace: Namespace | None = None) -> None:
2009
2009
  self.physical_data_model = data_model
2010
2010
  self.instance_namespace = instance_namespace
@@ -2106,7 +2106,7 @@ class _DMSRulesConverter:
2106
2106
  )
2107
2107
 
2108
2108
 
2109
- class _SubsetEditableCDMRules(VerifiedDataModelTransformer[PhysicalDataModel, PhysicalDataModel]):
2109
+ class _SubsetEditableCDMPhysicalDataModel(VerifiedDataModelTransformer[PhysicalDataModel, PhysicalDataModel]):
2110
2110
  """Subsets editable CDM data model to only include desired set of CDM concepts.
2111
2111
 
2112
2112
  !!! note "Platypus UI limitations"
@@ -35,7 +35,7 @@ class VerificationTransformer(DataModelTransformer[T_ImportedUnverifiedDataModel
35
35
  def transform(self, data_model: T_ImportedUnverifiedDataModel) -> T_VerifiedDataModel:
36
36
  in_ = data_model.unverified_data_model
37
37
  if in_ is None:
38
- raise NeatValueError("Cannot verify rules. The reading of the rules failed.")
38
+ raise NeatValueError("Cannot verify data model. The reading of the data model failed.")
39
39
  verified_data_model: T_VerifiedDataModel | None = None
40
40
  # We need to catch issues as we use the error args to provide extra context for the errors/warnings
41
41
  # For example, which row in the spreadsheet the error occurred.
@@ -39,7 +39,7 @@ class KnowledgeGraphExtractor(BaseExtractor):
39
39
 
40
40
  @abstractmethod
41
41
  def get_conceptual_data_model(self) -> ConceptualDataModel:
42
- """Returns the information rules that the extractor uses."""
42
+ """Returns the conceptual data model that the extractor uses."""
43
43
  raise NotImplementedError()
44
44
 
45
45
  @property
@@ -217,7 +217,7 @@ class ClassicGraphExtractor(KnowledgeGraphExtractor):
217
217
  ExcelImporter(classic_model).to_data_model(),
218
218
  )
219
219
  if unverified.unverified_data_model is None:
220
- raise NeatValueError(f"Could not read the classic model rules from {classic_model}.")
220
+ raise NeatValueError(f"Could not read the classic data model from {classic_model}.")
221
221
 
222
222
  verified = unverified.unverified_data_model.as_verified_data_model()
223
223
  prefixes = get_default_prefixes_and_namespaces()
@@ -29,8 +29,8 @@ class MockGraphGenerator(BaseExtractor):
29
29
  Class used to generate mock graph data for purposes of testing of NEAT.
30
30
 
31
31
  Args:
32
- rules: Transformation rules defining the classes with their properties.
33
- class_count: Target class count for each class in the ontology
32
+ data_model: Data model defining the concepts with their properties.
33
+ concept_count: Target concept count for each concept/class in the data model
34
34
  stop_on_exception: To stop if exception is encountered or not, default is False
35
35
  allow_isolated_classes: To allow generation of instances for classes that are not
36
36
  connected to any other class, default is True
@@ -47,37 +47,38 @@ class MockGraphGenerator(BaseExtractor):
47
47
  # fixes potential issues with circular dependencies
48
48
  from cognite.neat.core._data_model.transformers import PhysicalToConceptual
49
49
 
50
- self.rules = PhysicalToConceptual().transform(data_model)
50
+ self.data_model = PhysicalToConceptual().transform(data_model)
51
51
  elif isinstance(data_model, ConceptualDataModel):
52
- self.rules = data_model
52
+ self.data_model = data_model
53
53
  else:
54
- raise ValueError("Rules must be of type InformationRules or DMSRules!")
54
+ raise ValueError("Data model must be of type Conceptual or Physical!")
55
55
 
56
56
  if not concept_count:
57
57
  self.concept_count = {
58
- concept: 1 for concept in DataModelAnalysis(self.rules).defined_concepts(include_ancestors=True)
58
+ concept: 1 for concept in DataModelAnalysis(self.data_model).defined_concepts(include_ancestors=True)
59
59
  }
60
60
  elif all(isinstance(key, str) for key in concept_count.keys()):
61
61
  self.concept_count = {
62
- ConceptEntity.load(f"{self.rules.metadata.prefix}:{key}"): value for key, value in concept_count.items()
62
+ ConceptEntity.load(f"{self.data_model.metadata.prefix}:{key}"): value
63
+ for key, value in concept_count.items()
63
64
  }
64
65
  elif all(isinstance(key, ConceptEntity) for key in concept_count.keys()):
65
66
  self.concept_count = cast(dict[ConceptEntity, int], concept_count)
66
67
  else:
67
- raise ValueError("Class count keys must be of type str! or ClassEntity! or empty dict!")
68
+ raise ValueError("Class count keys must be of type str! or ConceptEntity! or empty dict!")
68
69
 
69
70
  self.stop_on_exception = stop_on_exception
70
71
  self.allow_isolated_classes = allow_isolated_classes
71
72
 
72
73
  def extract(self) -> list[Triple]:
73
- """Generate mock triples based on data model defined transformation rules and desired number
74
- of class instances
74
+ """Generate mock triples based on data model and desired number
75
+ of concept instances
75
76
 
76
77
  Returns:
77
78
  List of RDF triples, represented as tuples `(subject, predicate, object)`, that define data model instances
78
79
  """
79
80
  return generate_triples(
80
- self.rules,
81
+ self.data_model,
81
82
  self.concept_count,
82
83
  stop_on_exception=self.stop_on_exception,
83
84
  allow_isolated_concepts=self.allow_isolated_classes,
@@ -94,8 +95,8 @@ def generate_triples(
94
95
  of class instances
95
96
 
96
97
  Args:
97
- rules : Rules defining the data model
98
- concept_count: Target class count for each class in the ontology
98
+ data_model : Data model
99
+ concept_count: Target concept count for each class in the data model
99
100
  stop_on_exception: To stop if exception is encountered or not, default is False
100
101
  allow_isolated_concepts: To allow generation of instances for classes that are not
101
102
  connected to any other class, default is True
@@ -29,8 +29,8 @@ class BaseLoader(ABC, Generic[T_Output]):
29
29
  _new_line = "\n"
30
30
  _encoding = "utf-8"
31
31
 
32
- def __init__(self, graph_store: NeatInstanceStore):
33
- self.graph_store = graph_store
32
+ def __init__(self, instance_store: NeatInstanceStore):
33
+ self.instance_store = instance_store
34
34
 
35
35
  @abstractmethod
36
36
  def write_to_file(self, filepath: Path) -> None:
@@ -96,13 +96,13 @@ class _Projection:
96
96
 
97
97
 
98
98
  class DMSLoader(CDFLoader[dm.InstanceApply]):
99
- """Loads Instances to Cognite Data Fusion Data Model Service from NeatGraph.
99
+ """Loads Instances to Cognite Data Fusion Data Model Service from NeatInstanceStore.
100
100
 
101
101
  Args:
102
- dms_rules (DMSRules): The DMS rules used by the data model.
103
- info_rules (InformationRules): The information rules used by the data model, used to
104
- look+up the instances in the store.
105
- graph_store (NeatGraphStore): The graph store to load the data from.
102
+ physical_data_model (PhysicalDataModel): Physical data model.
103
+ conceptual_data_model (ConceptualDataModel): Conceptual data model,
104
+ used to look+up the instances in the store.
105
+ instance_store (NeatInstanceStore): The instance store to load the instances from.
106
106
  instance_space (str): The instance space to load the data into.
107
107
  create_issues (Sequence[NeatIssue] | None): A list of issues that occurred during reading. Defaults to None.
108
108
  client (NeatClient | None): This is used to lookup containers such that the loader
@@ -115,9 +115,9 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
115
115
 
116
116
  def __init__(
117
117
  self,
118
- dms_rules: PhysicalDataModel,
119
- info_rules: ConceptualDataModel,
120
- graph_store: NeatInstanceStore,
118
+ physical_data_model: PhysicalDataModel,
119
+ conceptual_data_model: ConceptualDataModel,
120
+ instance_store: NeatInstanceStore,
121
121
  instance_space: str,
122
122
  space_property: str | None = None,
123
123
  use_source_space: bool = False,
@@ -127,9 +127,9 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
127
127
  neat_prefix_by_predicate_uri: dict[URIRef, str] | None = None,
128
128
  neat_prefix_by_type_uri: dict[URIRef, str] | None = None,
129
129
  ):
130
- super().__init__(graph_store)
131
- self.dms_rules = dms_rules
132
- self.info_rules = info_rules
130
+ super().__init__(instance_store)
131
+ self.physical_data_model = physical_data_model
132
+ self.conceptual_data_model = conceptual_data_model
133
133
  self.neat_prefix_by_predicate_uri = neat_prefix_by_predicate_uri or {}
134
134
  self.neat_prefix_by_type_uri = neat_prefix_by_type_uri or {}
135
135
  self._instance_space = instance_space
@@ -192,7 +192,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
192
192
  projection, issues = self._create_projection(view)
193
193
  yield from issues
194
194
  query = it.query
195
- reader = self.graph_store.read(
195
+ reader = self.instance_store.read(
196
196
  query.rdf_type,
197
197
  property_renaming_config=query.property_renaming_config,
198
198
  remove_uri_namespace=False,
@@ -209,7 +209,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
209
209
  yield _END_OF_CLASS
210
210
 
211
211
  def _create_view_iterations(self) -> tuple[list[_ViewIterator], IssueList]:
212
- view_query_by_id = DataModelAnalysis(self.info_rules, self.dms_rules).view_query_by_id
212
+ view_query_by_id = DataModelAnalysis(self.conceptual_data_model, self.physical_data_model).view_query_by_id
213
213
  iterations_by_view_id = self._select_views_with_instances(view_query_by_id)
214
214
  if self._client:
215
215
  issues = IssueList()
@@ -224,7 +224,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
224
224
  else:
225
225
  views = dm.ViewList([])
226
226
  with catch_issues() as issues:
227
- read_model = self.dms_rules.as_schema().as_read_model()
227
+ read_model = self.physical_data_model.as_schema().as_read_model()
228
228
  views.extend(read_model.views)
229
229
  if issues.has_errors:
230
230
  return [], issues
@@ -274,7 +274,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
274
274
  """Selects the views with data."""
275
275
  view_iterations: dict[dm.ViewId, _ViewIterator] = {}
276
276
  for view_id, query in view_query_by_id.items():
277
- count = self.graph_store.queries.select.count_of_type(query.rdf_type)
277
+ count = self.instance_store.queries.select.count_of_type(query.rdf_type)
278
278
  if count > 0:
279
279
  view_iterations[view_id] = _ViewIterator(view_id, count, query)
280
280
  return view_iterations
@@ -284,7 +284,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
284
284
  if self._space_property is None:
285
285
  return issues
286
286
  total = sum(it.instance_count for it in view_iterations)
287
- properties_by_uriref = self.graph_store.queries.select.properties()
287
+ properties_by_uriref = self.instance_store.queries.select.properties()
288
288
  space_property_uri = next((k for k, v in properties_by_uriref.items() if v == self._space_property), None)
289
289
  if space_property_uri is None:
290
290
  error: ResourceNotFoundError[str, str] = ResourceNotFoundError(
@@ -297,7 +297,7 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
297
297
  issues.append(error)
298
298
  return issues
299
299
 
300
- instance_iterable = self.graph_store.queries.select.list_instances_ids_by_space(space_property_uri)
300
+ instance_iterable = self.instance_store.queries.select.list_instances_ids_by_space(space_property_uri)
301
301
  instance_iterable = iterate_progress_bar_if_above_config_threshold(
302
302
  instance_iterable, total, f"Looking up spaces for {total} instances..."
303
303
  )
@@ -323,8 +323,8 @@ class DMSLoader(CDFLoader[dm.InstanceApply]):
323
323
  if not self.neat_prefix_by_type_uri:
324
324
  return
325
325
 
326
- count = sum(count for _, count in self.graph_store.queries.select.summarize_instances())
327
- instance_iterable = self.graph_store.queries.select.list_instances_ids()
326
+ count = sum(count for _, count in self.instance_store.queries.select.summarize_instances())
327
+ instance_iterable = self.instance_store.queries.select.list_instances_ids()
328
328
  instance_iterable = iterate_progress_bar_if_above_config_threshold(
329
329
  instance_iterable, count, f"Looking up identifiers for {count} instances..."
330
330
  )
@@ -10,7 +10,7 @@ from ._base import BaseTransformerStandardised, RowTransformationOutput
10
10
 
11
11
 
12
12
  class MakeConnectionOnExactMatch(BaseTransformerStandardised):
13
- description: str = "Adds property that contains id of reference to all references of given class in Rules"
13
+ description: str = "Adds property that contains id of reference to all references of given class in model"
14
14
  _use_only_once: bool = False
15
15
  _need_changes = frozenset({})
16
16
 
@@ -37,7 +37,7 @@ def _from_pydantic_error(error: ErrorDetails, read_info_by_sheet: dict[str, Spre
37
37
  if location:
38
38
  return SpreadsheetError.create(location, neat_error, read_info_by_sheet.get(cast(str, location[0])))
39
39
 
40
- # errors that occur while for example parsing spreadsheet in input rules
40
+ # errors that occur while for example parsing spreadsheet in imported data model
41
41
  # will not have location information so we return neat_error as is
42
42
  # this is workaround until more elegant solution is found
43
43
  return neat_error
@@ -72,7 +72,7 @@ class ResourceNotDefinedError(ResourceError[T_Identifier]):
72
72
  class ResourceConversionError(ResourceError, ValueError):
73
73
  """Failed to convert the {resource_type} {identifier} to {target_format}: {reason}"""
74
74
 
75
- fix = "Check the error message and correct the rules."
75
+ fix = "Check the error message and correct the data model."
76
76
  target_format: str
77
77
  reason: str
78
78
 
@@ -48,7 +48,7 @@ class PropertyDefinitionDuplicatedWarning(PropertyWarning[T_Identifier]):
48
48
  @dataclass(unsafe_hash=True)
49
49
  class PropertyValueTypeUndefinedWarning(PropertyWarning[T_Identifier]):
50
50
  """The {resource_type} with identifier {identifier} has a property {property_name}
51
- which has undefined value type. This may result in unexpected behavior when exporting rules.
51
+ which has undefined value type. This may result in unexpected behavior when exporting data model.
52
52
  {default_action}"""
53
53
 
54
54
  extra = "Recommended action: {recommended_action}"
@@ -162,20 +162,22 @@ class NeatSession:
162
162
  reserved_properties: What to do with reserved properties. Can be "error" or "warning".
163
163
 
164
164
  Example:
165
- Convert to DMS rules
165
+ Convert to Physical Data Model
166
166
  ```python
167
167
  neat.convert()
168
168
  ```
169
169
  """
170
170
  self._state._raise_exception_if_condition_not_met(
171
- "Convert to physical", has_dms_rules=False, has_information_rules=True
171
+ "Convert to physical",
172
+ has_physical_data_model=False,
173
+ has_conceptual_data_model=True,
172
174
  )
173
175
  converter = ConceptualToPhysical(reserved_properties=reserved_properties, client=self._state.client)
174
176
 
175
177
  issues = self._state.rule_transform(converter)
176
178
 
177
179
  if self._verbose and not issues.has_errors:
178
- print("Rules converted to dms.")
180
+ print("Conceptual data model converted to physical data model.")
179
181
  else:
180
182
  print("Conversion failed.")
181
183
  if issues:
@@ -232,47 +234,49 @@ class NeatSession:
232
234
  ) -> IssueList:
233
235
  """Infer data model from instances."""
234
236
  last_entity: DataModelEntity | None = None
235
- if self._state.rule_store.provenance:
236
- last_entity = self._state.rule_store.provenance[-1].target_entity
237
+ if self._state.data_model_store.provenance:
238
+ last_entity = self._state.data_model_store.provenance[-1].target_entity
237
239
 
238
- # Note that this importer behaves as a transformer in the rule store when there is an existing rules.
239
- # We are essentially transforming the last entity's information rules into a new set of information rules.
240
+ # Note that this importer behaves as a transformer in the data model store when there
241
+ # is an existing data model.
242
+ # We are essentially transforming the last entity's conceptual data model
243
+ # into a new conceptual data model.
240
244
  importer = importers.SubclassInferenceImporter(
241
245
  issue_list=IssueList(),
242
246
  graph=self._state.instances.store.graph(),
243
- rules=last_entity.conceptual if last_entity is not None else None,
247
+ data_model=last_entity.conceptual if last_entity is not None else None,
244
248
  data_model_id=(dm.DataModelId.load(model_id) if last_entity is None else None),
245
249
  )
246
250
 
247
251
  def action() -> tuple[ConceptualDataModel, PhysicalDataModel | None]:
248
- unverified_information = importer.to_data_model()
249
- unverified_information = ToDMSCompliantEntities(rename_warning="raise").transform(unverified_information)
252
+ unverified_conceptual = importer.to_data_model()
253
+ unverified_conceptual = ToDMSCompliantEntities(rename_warning="raise").transform(unverified_conceptual)
250
254
 
251
- extra_info = VerifyConceptualDataModel().transform(unverified_information)
255
+ extra_conceptual = VerifyConceptualDataModel().transform(unverified_conceptual)
252
256
  if not last_entity:
253
- return extra_info, None
254
- merged_info = MergeConceptualDataModels(extra_info).transform(last_entity.conceptual)
257
+ return extra_conceptual, None
258
+ merged_conceptual = MergeConceptualDataModels(extra_conceptual).transform(last_entity.conceptual)
255
259
  if not last_entity.physical:
256
- return merged_info, None
260
+ return merged_conceptual, None
257
261
 
258
- extra_dms = ConceptualToPhysical(reserved_properties="warning", client=self._state.client).transform(
259
- extra_info
262
+ extra_physical = ConceptualToPhysical(reserved_properties="warning", client=self._state.client).transform(
263
+ extra_conceptual
260
264
  )
261
265
 
262
- merged_dms = MergePhysicalDataModels(extra_dms).transform(last_entity.physical)
263
- return merged_info, merged_dms
266
+ merged_physical = MergePhysicalDataModels(extra_physical).transform(last_entity.physical)
267
+ return merged_conceptual, merged_physical
264
268
 
265
- return self._state.rule_store.do_activity(action, importer)
269
+ return self._state.data_model_store.do_activity(action, importer)
266
270
 
267
271
  def _repr_html_(self) -> str:
268
272
  state = self._state
269
- if state.instances.empty and state.rule_store.empty:
273
+ if state.instances.empty and state.data_model_store.empty:
270
274
  return "<strong>Empty session</strong>. Get started by reading something with the <em>.read</em> attribute."
271
275
 
272
276
  output = []
273
277
 
274
- if state.rule_store.provenance:
275
- last_entity = state.rule_store.provenance[-1].target_entity
278
+ if state.data_model_store.provenance:
279
+ last_entity = state.data_model_store.provenance[-1].target_entity
276
280
  if last_entity.physical:
277
281
  html = last_entity.physical._repr_html_()
278
282
  else:
@@ -78,7 +78,7 @@ class DropDataModelAPI:
78
78
  """
79
79
  if sum([view_external_id is not None, group is not None]) != 1:
80
80
  raise NeatSessionError("Only one of view_external_id or group can be specified.")
81
- last_dms = self._state.rule_store.last_verified_physical_data_model
81
+ last_dms = self._state.data_model_store.last_verified_physical_data_model
82
82
  if group is not None and last_dms.metadata.as_data_model_id() not in COGNITE_MODELS:
83
83
  raise NeatSessionError("Group can only be specified for CogniteCore models.")
84
84
  if view_external_id is not None:
@@ -98,6 +98,6 @@ class DropDataModelAPI:
98
98
  )
99
99
  before = len(last_dms.views)
100
100
  issues = self._state.rule_transform(DropModelViews(view_external_id, group))
101
- after = len(self._state.rule_store.last_verified_physical_data_model.views)
101
+ after = len(self._state.data_model_store.last_verified_physical_data_model.views)
102
102
  print(f"Dropped {before - after} views.")
103
103
  return issues
@@ -10,7 +10,7 @@ class ExperimentalFeatureWarning(UserWarning):
10
10
 
11
11
 
12
12
  class ExperimentalFlags:
13
- manual_rules_edit = ExperimentalFeatureWarning("enable_manual_edit")
13
+ manual_data_model_edit = ExperimentalFeatureWarning("enable_manual_edit")
14
14
  same_space_properties_only_export = ExperimentalFeatureWarning("same-space-properties-only")
15
15
  standardize_naming = ExperimentalFeatureWarning("standardize_naming")
16
16
  standardize_space_and_version = ExperimentalFeatureWarning("standardize_space_and_version")
@@ -62,9 +62,9 @@ class InspectAPI:
62
62
  neat.inspect.properties
63
63
  ```
64
64
  """
65
- if self._state.rule_store.empty:
65
+ if self._state.data_model_store.empty:
66
66
  return pd.DataFrame()
67
- last_entity = self._state.rule_store.provenance[-1].target_entity
67
+ last_entity = self._state.data_model_store.provenance[-1].target_entity
68
68
  if last_entity.physical:
69
69
  df = last_entity.physical.properties.to_pandas()
70
70
  else:
@@ -73,9 +73,9 @@ class InspectAPI:
73
73
  return df
74
74
 
75
75
  def views(self) -> pd.DataFrame:
76
- if self._state.rule_store.empty:
76
+ if self._state.data_model_store.empty:
77
77
  return pd.DataFrame()
78
- last_entity = self._state.rule_store.provenance[-1].target_entity
78
+ last_entity = self._state.data_model_store.provenance[-1].target_entity
79
79
  if last_entity.physical is None:
80
80
  return pd.DataFrame()
81
81
  df = last_entity.physical.views.to_pandas()
@@ -95,7 +95,7 @@ class InspectIssues:
95
95
  def __call__(
96
96
  self,
97
97
  search: str | None = None,
98
- include: Literal["all", "errors", "warning"] | Set[Literal["all", "errors", "warning"]] = "all",
98
+ include: (Literal["all", "errors", "warning"] | Set[Literal["all", "errors", "warning"]]) = "all",
99
99
  return_dataframe: Literal[True] = (False if IN_NOTEBOOK else True), # type: ignore[assignment]
100
100
  ) -> pd.DataFrame: ...
101
101
 
@@ -103,7 +103,7 @@ class InspectIssues:
103
103
  def __call__(
104
104
  self,
105
105
  search: str | None = None,
106
- include: Literal["all", "errors", "warning"] | Set[Literal["all", "errors", "warning"]] = "all",
106
+ include: (Literal["all", "errors", "warning"] | Set[Literal["all", "errors", "warning"]]) = "all",
107
107
  return_dataframe: Literal[False] = (False if IN_NOTEBOOK else True), # type: ignore[assignment]
108
108
  ) -> None: ...
109
109
 
@@ -114,7 +114,7 @@ class InspectIssues:
114
114
  return_dataframe: bool = (False if IN_NOTEBOOK else True), # type: ignore[assignment]
115
115
  ) -> pd.DataFrame | None:
116
116
  """Returns the issues of the current data model."""
117
- issues = self._state.rule_store.last_issues
117
+ issues = self._state.data_model_store.last_issues
118
118
  if issues is None and self._state.instances.store.provenance:
119
119
  last_change = self._state.instances.store.provenance[-1]
120
120
  issues = last_change.target_entity.issues
@@ -180,7 +180,7 @@ class InspectOutcome:
180
180
  """
181
181
 
182
182
  def __init__(self, state: SessionState) -> None:
183
- self.data_model = InspectUploadOutcome(lambda: state.rule_store.last_outcome)
183
+ self.data_model = InspectUploadOutcome(lambda: state.data_model_store.last_outcome)
184
184
  self.instances = InspectUploadOutcome(lambda: state.instances.last_outcome)
185
185
 
186
186
 
@@ -42,12 +42,12 @@ class DataModelMappingAPI:
42
42
  neat.mapping.classic_to_core(company_prefix="WindFarmX", use_parent_property_name=True)
43
43
  ```
44
44
  """
45
- if self._state.rule_store.empty:
46
- raise NeatSessionError("No rules to map")
47
- last_entity = self._state.rule_store.provenance[-1].target_entity
45
+ if self._state.data_model_store.empty:
46
+ raise NeatSessionError("No data model to map")
47
+ last_entity = self._state.data_model_store.provenance[-1].target_entity
48
48
  if last_entity.physical is None:
49
49
  raise NeatSessionError("Data model not converted to DMS. Try running `neat.convert('dms')` first.")
50
- rules = last_entity.physical
50
+ data_model = last_entity.physical
51
51
  if self._state.client is None:
52
52
  raise NeatSessionError("Client is required to map classic to core")
53
53
 
@@ -57,7 +57,11 @@ class DataModelMappingAPI:
57
57
  transformers.extend(
58
58
  [
59
59
  PhysicalDataModelMapper(
60
- load_classic_to_core_mapping(company_prefix, rules.metadata.space, rules.metadata.version)
60
+ load_classic_to_core_mapping(
61
+ company_prefix,
62
+ data_model.metadata.space,
63
+ data_model.metadata.version,
64
+ )
61
65
  ),
62
66
  IncludeReferenced(self._state.client),
63
67
  ]