cognite-neat 0.121.1__py3-none-any.whl → 0.121.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release of cognite-neat has been flagged as potentially problematic by the registry.

Files changed (97)
  1. cognite/neat/_version.py +1 -1
  2. cognite/neat/core/_client/_api/statistics.py +91 -0
  3. cognite/neat/core/_client/_api_client.py +2 -0
  4. cognite/neat/core/_client/data_classes/statistics.py +125 -0
  5. cognite/neat/core/_client/testing.py +4 -0
  6. cognite/neat/core/_constants.py +6 -7
  7. cognite/neat/core/_data_model/_constants.py +23 -16
  8. cognite/neat/core/_data_model/_shared.py +33 -17
  9. cognite/neat/core/_data_model/analysis/__init__.py +2 -2
  10. cognite/neat/core/_data_model/analysis/_base.py +186 -183
  11. cognite/neat/core/_data_model/catalog/__init__.py +1 -1
  12. cognite/neat/core/_data_model/exporters/__init__.py +5 -5
  13. cognite/neat/core/_data_model/exporters/_base.py +10 -8
  14. cognite/neat/core/_data_model/exporters/{_rules2dms.py → _data_model2dms.py} +22 -18
  15. cognite/neat/core/_data_model/exporters/{_rules2excel.py → _data_model2excel.py} +51 -51
  16. cognite/neat/core/_data_model/exporters/{_rules2instance_template.py → _data_model2instance_template.py} +4 -4
  17. cognite/neat/core/_data_model/exporters/{_rules2ontology.py → _data_model2ontology.py} +50 -50
  18. cognite/neat/core/_data_model/exporters/{_rules2yaml.py → _data_model2yaml.py} +21 -18
  19. cognite/neat/core/_data_model/importers/__init__.py +6 -6
  20. cognite/neat/core/_data_model/importers/_base.py +8 -6
  21. cognite/neat/core/_data_model/importers/_base_file_reader.py +56 -0
  22. cognite/neat/core/_data_model/importers/{_yaml2rules.py → _dict2data_model.py} +40 -20
  23. cognite/neat/core/_data_model/importers/{_dms2rules.py → _dms2data_model.py} +58 -49
  24. cognite/neat/core/_data_model/importers/{_dtdl2rules → _dtdl2data_model}/dtdl_converter.py +22 -22
  25. cognite/neat/core/_data_model/importers/{_dtdl2rules → _dtdl2data_model}/dtdl_importer.py +7 -7
  26. cognite/neat/core/_data_model/importers/{_dtdl2rules → _dtdl2data_model}/spec.py +3 -3
  27. cognite/neat/core/_data_model/importers/_rdf/_base.py +9 -9
  28. cognite/neat/core/_data_model/importers/_rdf/_imf2rules.py +15 -15
  29. cognite/neat/core/_data_model/importers/_rdf/_inference2rules.py +36 -36
  30. cognite/neat/core/_data_model/importers/_rdf/_owl2rules.py +12 -12
  31. cognite/neat/core/_data_model/importers/_rdf/_shared.py +25 -25
  32. cognite/neat/core/_data_model/importers/{_spreadsheet2rules.py → _spreadsheet2data_model.py} +72 -12
  33. cognite/neat/core/_data_model/models/__init__.py +8 -8
  34. cognite/neat/core/_data_model/models/_base_unverified.py +1 -1
  35. cognite/neat/core/_data_model/models/_base_verified.py +3 -3
  36. cognite/neat/core/_data_model/models/_types.py +6 -6
  37. cognite/neat/core/_data_model/models/conceptual/__init__.py +6 -6
  38. cognite/neat/core/_data_model/models/conceptual/_unverified.py +20 -20
  39. cognite/neat/core/_data_model/models/conceptual/_validation.py +87 -77
  40. cognite/neat/core/_data_model/models/conceptual/_verified.py +53 -51
  41. cognite/neat/core/_data_model/models/data_types.py +2 -2
  42. cognite/neat/core/_data_model/models/entities/__init__.py +8 -8
  43. cognite/neat/core/_data_model/models/entities/_loaders.py +11 -10
  44. cognite/neat/core/_data_model/models/entities/_multi_value.py +5 -5
  45. cognite/neat/core/_data_model/models/entities/_single_value.py +44 -38
  46. cognite/neat/core/_data_model/models/entities/_types.py +9 -3
  47. cognite/neat/core/_data_model/models/entities/_wrapped.py +3 -3
  48. cognite/neat/core/_data_model/models/mapping/_classic2core.py +12 -9
  49. cognite/neat/core/_data_model/models/physical/__init__.py +40 -0
  50. cognite/neat/core/_data_model/models/{dms → physical}/_exporter.py +71 -52
  51. cognite/neat/core/_data_model/models/{dms/_rules_input.py → physical/_unverified.py} +48 -39
  52. cognite/neat/core/_data_model/models/{dms → physical}/_validation.py +13 -11
  53. cognite/neat/core/_data_model/models/{dms/_rules.py → physical/_verified.py} +68 -60
  54. cognite/neat/core/_data_model/transformers/__init__.py +27 -23
  55. cognite/neat/core/_data_model/transformers/_base.py +26 -19
  56. cognite/neat/core/_data_model/transformers/_converters.py +703 -618
  57. cognite/neat/core/_data_model/transformers/_mapping.py +74 -55
  58. cognite/neat/core/_data_model/transformers/_verification.py +63 -54
  59. cognite/neat/core/_instances/extractors/_base.py +1 -1
  60. cognite/neat/core/_instances/extractors/_classic_cdf/_classic.py +8 -8
  61. cognite/neat/core/_instances/extractors/_dms_graph.py +42 -34
  62. cognite/neat/core/_instances/extractors/_mock_graph_generator.py +98 -95
  63. cognite/neat/core/_instances/loaders/_base.py +2 -2
  64. cognite/neat/core/_instances/loaders/_rdf2dms.py +6 -6
  65. cognite/neat/core/_instances/transformers/_base.py +7 -4
  66. cognite/neat/core/_instances/transformers/_value_type.py +2 -6
  67. cognite/neat/core/_issues/_base.py +4 -4
  68. cognite/neat/core/_issues/errors/__init__.py +2 -2
  69. cognite/neat/core/_issues/errors/_wrapper.py +2 -2
  70. cognite/neat/core/_issues/warnings/_models.py +4 -4
  71. cognite/neat/core/_store/__init__.py +3 -3
  72. cognite/neat/core/_store/{_rules_store.py → _data_model.py} +119 -112
  73. cognite/neat/core/_store/{_graph_store.py → _instance.py} +3 -4
  74. cognite/neat/core/_store/_provenance.py +2 -2
  75. cognite/neat/core/_store/exceptions.py +2 -2
  76. cognite/neat/core/_utils/rdf_.py +14 -0
  77. cognite/neat/core/_utils/text.py +1 -1
  78. cognite/neat/session/_base.py +22 -20
  79. cognite/neat/session/_drop.py +2 -2
  80. cognite/neat/session/_inspect.py +5 -5
  81. cognite/neat/session/_mapping.py +8 -6
  82. cognite/neat/session/_read.py +2 -2
  83. cognite/neat/session/_set.py +3 -3
  84. cognite/neat/session/_show.py +11 -11
  85. cognite/neat/session/_state.py +13 -13
  86. cognite/neat/session/_subset.py +12 -9
  87. cognite/neat/session/_template.py +13 -13
  88. cognite/neat/session/_to.py +17 -17
  89. {cognite_neat-0.121.1.dist-info → cognite_neat-0.121.2.dist-info}/METADATA +1 -1
  90. {cognite_neat-0.121.1.dist-info → cognite_neat-0.121.2.dist-info}/RECORD +95 -93
  91. cognite/neat/core/_data_model/exporters/_validation.py +0 -14
  92. cognite/neat/core/_data_model/models/dms/__init__.py +0 -32
  93. /cognite/neat/core/_data_model/catalog/{info-rules-imf.xlsx → conceptual-imf-data-model.xlsx} +0 -0
  94. /cognite/neat/core/_data_model/importers/{_dtdl2rules → _dtdl2data_model}/__init__.py +0 -0
  95. /cognite/neat/core/_data_model/importers/{_dtdl2rules → _dtdl2data_model}/_unit_lookup.py +0 -0
  96. {cognite_neat-0.121.1.dist-info → cognite_neat-0.121.2.dist-info}/WHEEL +0 -0
  97. {cognite_neat-0.121.1.dist-info → cognite_neat-0.121.2.dist-info}/licenses/LICENSE +0 -0
@@ -12,19 +12,19 @@ from rdflib import Literal as RdfLiteral

  from cognite.neat.core._config import GLOBAL_CONFIG
  from cognite.neat.core._constants import NEAT, get_default_prefixes_and_namespaces
- from cognite.neat.core._data_model.analysis import RulesAnalysis
+ from cognite.neat.core._data_model.analysis import DataModelAnalysis
  from cognite.neat.core._data_model.models import ConceptualDataModel, data_types
  from cognite.neat.core._data_model.models.conceptual import (
- ConceptualClass,
+ Concept,
  ConceptualMetadata,
- UnverifiedConceptualClass,
+ UnverifiedConcept,
  UnverifiedConceptualProperty,
  )
  from cognite.neat.core._data_model.models.data_types import AnyURI
  from cognite.neat.core._data_model.models.entities._single_value import UnknownEntity
  from cognite.neat.core._issues import IssueList
  from cognite.neat.core._issues.warnings import PropertyValueTypeUndefinedWarning
- from cognite.neat.core._store import NeatGraphStore
+ from cognite.neat.core._store import NeatInstanceStore
  from cognite.neat.core._store._provenance import INSTANCES_ENTITY
  from cognite.neat.core._utils.collection_ import iterate_progress_bar
  from cognite.neat.core._utils.rdf_ import remove_namespace_from_uri, uri_to_short_form
@@ -83,8 +83,8 @@ class InferenceImporter(BaseRDFImporter):
  @classmethod
  def from_graph_store(
  cls,
- store: NeatGraphStore,
- data_model_id: dm.DataModelId | tuple[str, str, str] = DEFAULT_INFERENCE_DATA_MODEL_ID,
+ store: NeatInstanceStore,
+ data_model_id: (dm.DataModelId | tuple[str, str, str]) = DEFAULT_INFERENCE_DATA_MODEL_ID,
  max_number_of_instance: int = -1,
  non_existing_node_type: UnknownEntity | AnyURI = DEFAULT_NON_EXISTING_NODE_TYPE,
  language: str = "en",
@@ -145,7 +145,7 @@ class InferenceImporter(BaseRDFImporter):
  ) -> "InferenceImporter":
  raise NotImplementedError("JSON file format is not supported yet.")

- def _to_rules_components(
+ def _to_data_model_components(
  self,
  ) -> dict:
  """Convert RDF graph to dictionary defining data model and prefixes of the graph
@@ -158,33 +158,33 @@
  Tuple of data model and prefixes of the graph
  """

- classes: dict[str, dict] = {}
+ concepts: dict[str, dict] = {}
  properties: dict[str, dict] = {}
  prefixes: dict[str, Namespace] = {}
  count_by_value_type_by_property: dict[str, dict[str, int]] = defaultdict(Counter)

  # Infers all the classes in the graph
- for class_uri, no_instances in self.graph.query(ORDERED_CLASSES_QUERY): # type: ignore[misc]
- if (class_id := remove_namespace_from_uri(cast(URIRef, class_uri))) in classes:
+ for concept_uri, no_instances in self.graph.query(ORDERED_CLASSES_QUERY): # type: ignore[misc]
+ if (concept_id := remove_namespace_from_uri(cast(URIRef, concept_uri))) in concepts:
  # handles cases when class id is already present in classes
- class_id = f"{class_id}_{len(classes) + 1}"
+ concept_id = f"{concept_id}_{len(concepts) + 1}"

- classes[class_id] = {
- "class_": class_id,
- "uri": class_uri,
+ concepts[concept_id] = {
+ "concept": concept_id,
+ "uri": concept_uri,
  "comment": f"Inferred from knowledge graph, where this class has <{no_instances}> instances",
  }

- self._add_uri_namespace_to_prefixes(cast(URIRef, class_uri), prefixes)
+ self._add_uri_namespace_to_prefixes(cast(URIRef, concept_uri), prefixes)

  instances_query = (
  INSTANCES_OF_CLASS_QUERY if self.max_number_of_instance == -1 else INSTANCES_OF_CLASS_RICHNESS_ORDERED_QUERY
  )

- classes_iterable = iterate_progress_bar(classes.items(), len(classes), "Inferring classes")
+ classes_iterable = iterate_progress_bar(concepts.items(), len(concepts), "Inferring classes")

  # Infers all the properties of the class
- for class_id, class_definition in classes_iterable:
+ for concept_id, class_definition in classes_iterable:
  for ( # type: ignore[misc]
  instance,
  _,
@@ -216,7 +216,7 @@ class InferenceImporter(BaseRDFImporter):

  issue = PropertyValueTypeUndefinedWarning(
  resource_type="Property",
- identifier=f"{class_id}:{property_id}",
+ identifier=f"{concept_id}:{property_id}",
  property_name=property_id,
  default_action="Remove the property from the rules",
  recommended_action="Make sure that graph is complete",
@@ -225,10 +225,10 @@ class InferenceImporter(BaseRDFImporter):
  if issue not in self.issue_list:
  self.issue_list.append(issue)

- id_ = f"{class_id}:{property_id}"
+ id_ = f"{concept_id}:{property_id}"

  definition = {
- "class_": class_id,
+ "concept": concept_id,
  "property_": property_id,
  "max_count": cast(RdfLiteral, occurrence).value,
  "value_type": value_type_id,
@@ -265,7 +265,7 @@ class InferenceImporter(BaseRDFImporter):

  return {
  "metadata": self._default_metadata(),
- "classes": list(classes.values()),
+ "concepts": list(concepts.values()),
  "properties": list(properties.values()),
  "prefixes": prefixes,
  }
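
For orientation, here is a minimal sketch of the component dictionary that _to_data_model_components now returns. Only the metadata/concepts/properties/prefixes keys and the renamed "concept" field are taken from this diff; the concrete values and the metadata fields are illustrative assumptions.

    from rdflib import Namespace

    # Illustrative shape only; the values and metadata fields below are made up.
    components = {
        "metadata": {"space": "neat_space", "external_id": "inferred_model"},  # assumed metadata fields
        "concepts": [
            {
                "concept": "Pump",
                "uri": "http://example.org/Pump",
                "comment": "Inferred from knowledge graph, where this class has <42> instances",
            }
        ],
        "properties": [
            {"concept": "Pump", "property_": "ratedPower", "max_count": 1, "value_type": "float"},
        ],
        "prefixes": {"ex": Namespace("http://example.org/")},
    }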
@@ -372,7 +372,7 @@ class SubclassInferenceImporter(BaseRDFImporter):
  super().__init__(issue_list, graph, identifier, -1, non_existing_node_type, language="en")
  self._rules = rules

- def _to_rules_components(
+ def _to_data_model_components(
  self,
  ) -> dict:
  if self._rules:
@@ -392,19 +392,19 @@ class SubclassInferenceImporter(BaseRDFImporter):
  default_space = metadata["space"]
  return {
  "metadata": metadata,
- "classes": [cls.dump(default_space) for cls in classes],
+ "concepts": [cls.dump(default_space) for cls in classes],
  "properties": [prop.dump(default_space) for prop in properties],
  "prefixes": prefixes,
  }

  def _create_classes_properties(
  self, read_properties: list[_ReadProperties], prefixes: dict[str, Namespace]
- ) -> tuple[list[UnverifiedConceptualClass], list[UnverifiedConceptualProperty]]:
+ ) -> tuple[list[UnverifiedConcept], list[UnverifiedConceptualProperty]]:
  if self._rules:
- existing_classes = {class_.class_.suffix: class_ for class_ in self._rules.classes}
+ existing_classes = {class_.concept.suffix: class_ for class_ in self._rules.concepts}
  else:
  existing_classes = {}
- classes: list[UnverifiedConceptualClass] = []
+ classes: list[UnverifiedConcept] = []
  properties_by_class_suffix_by_property_id: dict[str, dict[str, UnverifiedConceptualProperty]] = {}

  # Help for IDE
@@ -429,9 +429,9 @@ class SubclassInferenceImporter(BaseRDFImporter):
  parent_suffix = remove_namespace_from_uri(parent_uri)
  self._add_uri_namespace_to_prefixes(parent_uri, prefixes)
  if parent_suffix not in existing_classes:
- classes.append(UnverifiedConceptualClass(class_=parent_suffix))
+ classes.append(UnverifiedConcept(concept=parent_suffix))
  else:
- classes.append(UnverifiedConceptualClass.load(existing_classes[parent_suffix].model_dump()))
+ classes.append(UnverifiedConcept.load(existing_classes[parent_suffix].model_dump()))
  else:
  shared_property_uris = set()
  shared_properties: dict[URIRef, list[_ReadProperties]] = defaultdict(list)
@@ -441,14 +441,14 @@ class SubclassInferenceImporter(BaseRDFImporter):

  if class_suffix not in existing_classes:
  classes.append(
- UnverifiedConceptualClass(
- class_=class_suffix,
+ UnverifiedConcept(
+ concept=class_suffix,
  implements=parent_suffix,
  instance_source=type_uri,
  )
  )
  else:
- classes.append(UnverifiedConceptualClass.load(existing_classes[class_suffix].model_dump()))
+ classes.append(UnverifiedConcept.load(existing_classes[class_suffix].model_dump()))

  properties_by_id: dict[str, UnverifiedConceptualProperty] = {}
  for property_uri, read_properties in properties_by_property_uri.items():
@@ -510,20 +510,20 @@ class SubclassInferenceImporter(BaseRDFImporter):
  type_uri, instance_count_literal = cast(tuple[URIRef, RdfLiteral], result_row)
  count_by_type[type_uri] = instance_count_literal.toPython()
  if self._rules:
- analysis = RulesAnalysis(self._rules)
+ analysis = DataModelAnalysis(self._rules)
  existing_class_properties = {
  (class_entity.suffix, prop.property_): prop
- for class_entity, properties in analysis.properties_by_class(
+ for class_entity, properties in analysis.properties_by_concepts(
  include_ancestors=True, include_different_space=True
  ).items()
  for prop in properties
  }
- existing_classes = {cls_.class_.suffix: cls_ for cls_ in self._rules.classes}
+ existing_classes = {cls_.concept.suffix: cls_ for cls_ in self._rules.concepts}
  else:
  existing_class_properties = {}
  existing_classes = {}
  properties_by_class_by_subclass: list[_ReadProperties] = []
- existing_class: ConceptualClass | None
+ existing_class: Concept | None
  total_instance_count = sum(count_by_type.values())
  iterable = count_by_type.items()
  if GLOBAL_CONFIG.use_iterate_bar_threshold and total_instance_count > GLOBAL_CONFIG.use_iterate_bar_threshold:
@@ -581,7 +581,7 @@ class SubclassInferenceImporter(BaseRDFImporter):
  first = read_properties[0]
  value_type = self._get_value_type(read_properties, prefixes)
  return UnverifiedConceptualProperty(
- class_=class_suffix,
+ concept=class_suffix,
  property_=property_id,
  max_count=first.max_occurrence,
  value_type=value_type,
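
The keyword rename is also visible when the unverified objects are constructed directly. A minimal sketch with illustrative values; only the concept, property_, implements, value_type and max_count arguments are taken from this diff, and the remaining fields are assumed to have defaults.

    from cognite.neat.core._data_model.models.conceptual import (
        UnverifiedConcept,
        UnverifiedConceptualProperty,
    )

    # The former class_=... keyword becomes concept=... in 0.121.2 (illustrative values).
    pump = UnverifiedConcept(concept="Pump", implements="Asset")
    rated_power = UnverifiedConceptualProperty(
        concept="Pump",
        property_="ratedPower",
        value_type="float",
        max_count=1,
    )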
@@ -3,20 +3,20 @@ there are loaders to TransformationRules pydantic class."""

  from cognite.neat.core._data_model.importers._rdf._base import BaseRDFImporter
  from cognite.neat.core._data_model.importers._rdf._shared import (
- parse_classes,
+ parse_concepts,
  parse_properties,
  )

- CLASSES_QUERY = """SELECT ?class_ ?name ?description ?implements
+ CLASSES_QUERY = """SELECT ?concept ?name ?description ?implements
  WHERE {{

- ?class_ a owl:Class .
- OPTIONAL {{?class_ rdfs:subClassOf ?implements }}.
- OPTIONAL {{?class_ rdfs:label|skos:prefLabel ?name }}.
- OPTIONAL {{?class_ rdfs:comment|skos:definition ?description}} .
+ ?concept a owl:Class .
+ OPTIONAL {{?concept rdfs:subClassOf ?implements }}.
+ OPTIONAL {{?concept rdfs:label|skos:prefLabel ?name }}.
+ OPTIONAL {{?concept rdfs:comment|skos:definition ?description}} .


- FILTER (!isBlank(?class_))
+ FILTER (!isBlank(?concept ))
  FILTER (!bound(?implements) || !isBlank(?implements))

  FILTER (!bound(?name) || LANG(?name) = "" || LANGMATCHES(LANG(?name), "{language}"))
@@ -27,11 +27,11 @@ CLASSES_QUERY = """SELECT ?class_ ?name ?description ?implements

  PROPERTIES_QUERY = """

- SELECT ?class_ ?property_ ?name ?description ?value_type ?minCount ?maxCount ?default
+ SELECT ?concept ?property_ ?name ?description ?value_type ?minCount ?maxCount ?default
  WHERE {{
  ?property_ a ?property_Type.
  FILTER (?property_Type IN (owl:ObjectProperty, owl:DatatypeProperty ) )
- OPTIONAL {{?property_ rdfs:domain ?class_ }}.
+ OPTIONAL {{?property_ rdfs:domain ?concept }}.
  OPTIONAL {{?property_ rdfs:range ?value_type }}.
  OPTIONAL {{?property_ rdfs:label|skos:prefLabel ?name }}.
  OPTIONAL {{?property_ rdfs:comment|skos:definition ?description}}.
@@ -64,10 +64,10 @@ class OWLImporter(BaseRDFImporter):

  """

- def _to_rules_components(
+ def _to_data_model_components(
  self,
  ) -> dict:
- classes, issue_list = parse_classes(self.graph, CLASSES_QUERY, self.language, self.issue_list)
+ concepts, issue_list = parse_concepts(self.graph, CLASSES_QUERY, self.language, self.issue_list)
  self.issue_list = issue_list

  # NeatError
@@ -76,7 +76,7 @@ class OWLImporter(BaseRDFImporter):


  components = {
  "Metadata": self._metadata,
- "Classes": list(classes.values()) if classes else [],
+ "Concepts": list(concepts.values()) if concepts else [],
  "Properties": list(properties.values()) if properties else [],
  }

@@ -13,18 +13,18 @@ from cognite.neat.core._issues.warnings._resources import (
  )
  from cognite.neat.core._utils.rdf_ import convert_rdflib_content

- def parse_classes(graph: Graph, query: str, language: str, issue_list: IssueList) -> tuple[dict, IssueList]:
- """Parse classes from graph
+ def parse_concepts(graph: Graph, query: str, language: str, issue_list: IssueList) -> tuple[dict, IssueList]:
+ """Parse concepts from graph

  Args:
- graph: Graph containing classes definitions
+ graph: Graph containing concept definitions
  language: Language to use for parsing, by default "en"

  Returns:
  Dataframe containing owl classes
  """

- classes: dict[str, dict] = {}
+ concepts: dict[str, dict] = {}

  query = prepareQuery(query.format(language=language), initNs={k: v for k, v in graph.namespaces()})
  expected_keys = [str(v) for v in query.algebra._vars]
@@ -33,42 +33,42 @@ def parse_classes(graph: Graph, query: str, language: str, issue_list: IssueList
  res: dict = convert_rdflib_content(cast(ResultRow, raw).asdict(), True)
  res = {key: res.get(key, None) for key in expected_keys}

- class_id = res["class_"]
+ concept_id = res["concept"]

  # Safeguarding against incomplete semantic definitions
  if res["implements"] and isinstance(res["implements"], BNode):
  issue_list.append(
  ResourceRetrievalWarning(
- class_id,
+ concept_id,
  "implements",
- error=("Unable to determine class that is being implemented"),
+ error=("Unable to determine concept that is being implemented"),
  )
  )
  continue

- if class_id not in classes:
- classes[class_id] = res
+ if concept_id not in concepts:
+ concepts[concept_id] = res
  else:
  # Handling implements
- if classes[class_id]["implements"] and isinstance(classes[class_id]["implements"], list):
- if res["implements"] not in classes[class_id]["implements"]:
- classes[class_id]["implements"].append(res["implements"])
+ if concepts[concept_id]["implements"] and isinstance(concepts[concept_id]["implements"], list):
+ if res["implements"] not in concepts[concept_id]["implements"]:
+ concepts[concept_id]["implements"].append(res["implements"])

- elif classes[class_id]["implements"] and isinstance(classes[class_id]["implements"], str):
- classes[class_id]["implements"] = [classes[class_id]["implements"]]
+ elif concepts[concept_id]["implements"] and isinstance(concepts[concept_id]["implements"], str):
+ concepts[concept_id]["implements"] = [concepts[concept_id]["implements"]]

- if res["implements"] not in classes[class_id]["implements"]:
- classes[class_id]["implements"].append(res["implements"])
+ if res["implements"] not in concepts[concept_id]["implements"]:
+ concepts[concept_id]["implements"].append(res["implements"])
  elif res["implements"]:
- classes[class_id]["implements"] = [res["implements"]]
+ concepts[concept_id]["implements"] = [res["implements"]]

- handle_meta("class_", classes, class_id, res, "name", issue_list)
- handle_meta("class_", classes, class_id, res, "description", issue_list)
+ handle_meta("concept", concepts, concept_id, res, "name", issue_list)
+ handle_meta("concept", concepts, concept_id, res, "description", issue_list)

- if not classes:
- issue_list.append(NeatValueError("Unable to parse classes"))
+ if not concepts:
+ issue_list.append(NeatValueError("Unable to parse concepts"))

- return classes, issue_list
+ return concepts, issue_list


  def parse_properties(graph: Graph, query: str, language: str, issue_list: IssueList) -> tuple[dict, IssueList]:
@@ -94,12 +94,12 @@ def parse_properties(graph: Graph, query: str, language: str, issue_list: IssueL
  property_id = res["property_"]

  # Safeguarding against incomplete semantic definitions
- if not res["class_"] or isinstance(res["class_"], BNode):
+ if not res["concept"] or isinstance(res["concept"], BNode):
  issue_list.append(
  ResourceRetrievalWarning(
  property_id,
  "property",
- error=("Unable to determine to what class property is being defined"),
+ error=("Unable to determine to what concept property is being defined"),
  )
  )
  continue
@@ -115,7 +115,7 @@ def parse_properties(graph: Graph, query: str, language: str, issue_list: IssueL
  )
  continue

- id_ = f"{res['class_']}.{res['property_']}"
+ id_ = f"{res['concept']}.{res['property_']}"

  if id_ not in properties:
  properties[id_] = res
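
A hedged usage sketch of the renamed helper. It assumes CLASSES_QUERY is still exposed by the private _owl2rules module shown above, that the parsed graph has the owl, rdfs and skos prefixes bound, and that the filename is hypothetical; private modules may change without notice.

    from rdflib import Graph

    from cognite.neat.core._data_model.importers._rdf._owl2rules import CLASSES_QUERY
    from cognite.neat.core._data_model.importers._rdf._shared import parse_concepts
    from cognite.neat.core._issues import IssueList

    graph = Graph()
    graph.parse("ontology.ttl")  # hypothetical OWL file; binds owl/rdfs/skos prefixes

    # Returns a dict keyed by concept id, plus the (possibly extended) issue list.
    concepts, issues = parse_concepts(graph, CLASSES_QUERY, "en", IssueList(title="owl concepts"))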
@@ -1,18 +1,24 @@
  """This module performs importing of graph to TransformationRules pydantic class.
  In more details, it traverses the graph and abstracts class and properties, basically
- generating a list of rules based on which nodes that form the graph are made.
+ generating a list of data_model based on which nodes that form the graph are made.
  """

+ import tempfile
  from collections import UserDict, defaultdict
  from dataclasses import dataclass
  from pathlib import Path
  from typing import Literal, cast

  import pandas as pd
+ from openpyxl import load_workbook
+ from openpyxl.worksheet.worksheet import Worksheet
  from pandas import ExcelFile
  from rdflib import Namespace, URIRef

- from cognite.neat.core._data_model._shared import ReadRules, T_InputRules
+ from cognite.neat.core._data_model._shared import (
+ ImportedDataModel,
+ T_UnverifiedDataModel,
+ )
  from cognite.neat.core._data_model.models import (
  INPUT_RULES_BY_ROLE,
  VERIFIED_RULES_BY_ROLE,
@@ -36,11 +42,11 @@ SOURCE_SHEET__TARGET_FIELD__HEADERS = [
  "Properties",
  "Properties",
  {
- RoleTypes.information: ["Class", "Property"],
+ RoleTypes.information: ["Concept", "Property"],
  RoleTypes.dms: ["View", "View Property"],
  },
  ),
- ("Classes", "Classes", ["Class"]),
+ ("Concepts", "Concepts", ["Concept"]),
  ("Containers", "Containers", ["Container"]),
  ("Views", "Views", ["View"]),
  ("Enum", "Enum", ["Collection"]),
@@ -242,8 +248,8 @@ class SpreadsheetReader:
  return sheets, read_info_by_sheet


- class ExcelImporter(BaseImporter[T_InputRules]):
- """Import rules from an Excel file.
+ class ExcelImporter(BaseImporter[T_UnverifiedDataModel]):
+ """Import data_model from an Excel file.

  Args:
  filepath (Path): The path to the Excel file.
@@ -252,14 +258,15 @@ class ExcelImporter(BaseImporter[T_InputRules]):
  def __init__(self, filepath: Path):
  self.filepath = filepath

- def to_rules(self) -> ReadRules[T_InputRules]:
+ def to_data_model(self) -> ImportedDataModel[T_UnverifiedDataModel]:
  issue_list = IssueList(title=f"'{self.filepath.name}'")
  if not self.filepath.exists():
  raise FileNotFoundNeatError(self.filepath)

+ self.filepath = self._make_forward_compatible_spreadsheet(self.filepath)
+
  with pd.ExcelFile(self.filepath) as excel_file:
  user_reader = SpreadsheetReader(issue_list)
-
  user_read = user_reader.read(excel_file, self.filepath)

  issue_list.trigger_warnings()
@@ -267,15 +274,23 @@
  raise MultiValueError(issue_list.errors)

  if user_read is None:
- return ReadRules(None, {})
+ return ImportedDataModel(None, {})

  sheets = user_read.sheets
  original_role = user_read.role
  read_info_by_sheet = user_read.read_info_by_sheet

- rules_cls = INPUT_RULES_BY_ROLE[original_role]
- rules = cast(T_InputRules, rules_cls.load(sheets))
- return ReadRules(rules, read_info_by_sheet)
+ data_model_cls = INPUT_RULES_BY_ROLE[original_role]
+ data_model = cast(T_UnverifiedDataModel, data_model_cls.load(sheets))
+
+ # Delete the temporary file if it was created
+ if "temp_neat_file" in self.filepath.name:
+ try:
+ self.filepath.unlink()
+ except Exception as e:
+ issue_list.append(FileReadError(self.filepath, f"Failed to delete temporary file: {e}"))
+
+ return ImportedDataModel(data_model, read_info_by_sheet)

  @property
  def description(self) -> str:
@@ -284,3 +299,48 @@ class ExcelImporter(BaseImporter[T_InputRules]):
  @property
  def source_uri(self) -> URIRef:
  return URIRef(f"file://{self.filepath.name}")
+
+ def _make_forward_compatible_spreadsheet(self, filepath: Path) -> Path:
+ """Makes the spreadsheet forward compatible by renaming legacy class with concept
+
+ Args:
+ filepath (Path): The path to the Excel file.
+
+ """
+
+ workbook = load_workbook(filepath)
+
+ if "Classes" in workbook.sheetnames:
+ print(
+ (
+ "You are using a legacy spreadsheet format, "
+ "which we will support until v1.0 of neat."
+ " Please update your spreadsheet to the new format."
+ ),
+ )
+ _replace_class_with_concept_cell(workbook["Classes"])
+ sheet = workbook["Classes"]
+ sheet.title = "Concepts"
+
+ if "Properties" in workbook.sheetnames:
+ _replace_class_with_concept_cell(workbook["Properties"])
+
+ with tempfile.NamedTemporaryFile(prefix="temp_neat_file", suffix=".xlsx", delete=False) as temp_file:
+ workbook.save(temp_file.name)
+ return Path(temp_file.name)
+
+ else:
+ return filepath
+
+
+ def _replace_class_with_concept_cell(sheet: Worksheet) -> None:
+ """
+ Replaces the word "Class" with "Concept" in the first row of the given sheet.
+
+ Args:
+ sheet (Worksheet): The sheet in which to replace the word "Class".
+ """
+ for row in sheet.iter_rows():
+ for cell in row:
+ if cell.value == "Class":
+ cell.value = "Concept"
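
The same migration can be applied once, in place, with a few lines of openpyxl. This is a sketch mirroring the importer's shim above, not part of the package, and the filename is hypothetical.

    from openpyxl import load_workbook

    # One-off migration of a legacy neat spreadsheet: rename "Class" header cells
    # and the "Classes" sheet to the new "Concept"/"Concepts" terminology.
    workbook = load_workbook("my_data_model.xlsx")  # hypothetical file
    for sheet_name in ("Classes", "Properties"):
        if sheet_name in workbook.sheetnames:
            for row in workbook[sheet_name].iter_rows():
                for cell in row:
                    if cell.value == "Class":
                        cell.value = "Concept"
    if "Classes" in workbook.sheetnames:
        workbook["Classes"].title = "Concepts"
    workbook.save("my_data_model.xlsx")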
@@ -7,30 +7,30 @@ from cognite.neat.core._data_model.models.conceptual._verified import (
  )

  from ._base_verified import DataModelType, ExtensionCategory, RoleTypes, SchemaCompleteness, SheetList, SheetRow
- from .dms._rules import DMSRules
- from .dms._rules_input import DMSInputRules
+ from .physical._unverified import UnverifiedPhysicalDataModel
+ from .physical._verified import PhysicalDataModel

- INPUT_RULES_BY_ROLE: dict[RoleTypes, type[UnverifiedConceptualDataModel] | type[DMSInputRules]] = {
+ INPUT_RULES_BY_ROLE: dict[RoleTypes, type[UnverifiedConceptualDataModel] | type[UnverifiedPhysicalDataModel]] = {
  RoleTypes.information: UnverifiedConceptualDataModel,
- RoleTypes.dms: DMSInputRules,
+ RoleTypes.dms: UnverifiedPhysicalDataModel,
  }
- VERIFIED_RULES_BY_ROLE: dict[RoleTypes, type[ConceptualDataModel] | type[DMSRules]] = {
+ VERIFIED_RULES_BY_ROLE: dict[RoleTypes, type[ConceptualDataModel] | type[PhysicalDataModel]] = {
  RoleTypes.information: ConceptualDataModel,
- RoleTypes.dms: DMSRules,
+ RoleTypes.dms: PhysicalDataModel,
  }


  __all__ = [
  "INPUT_RULES_BY_ROLE",
  "ConceptualDataModel",
- "DMSInputRules",
- "DMSRules",
  "DMSSchema",
  "DataModelType",
  "ExtensionCategory",
+ "PhysicalDataModel",
  "RoleTypes",
  "SchemaCompleteness",
  "SheetList",
  "SheetRow",
  "UnverifiedConceptualDataModel",
+ "UnverifiedPhysicalDataModel",
  ]
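
A short sketch of how the renamed lookup tables resolve, assuming the RoleTypes enum keeps the information and dms members used in the mapping above.

    from cognite.neat.core._data_model.models import (
        INPUT_RULES_BY_ROLE,
        VERIFIED_RULES_BY_ROLE,
        RoleTypes,
    )

    # RoleTypes.dms now resolves to the renamed physical data model classes.
    assert INPUT_RULES_BY_ROLE[RoleTypes.dms].__name__ == "UnverifiedPhysicalDataModel"
    assert VERIFIED_RULES_BY_ROLE[RoleTypes.information].__name__ == "ConceptualDataModel"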
@@ -110,7 +110,7 @@ class UnverifiedDataModel(Generic[T_BaseRules], ABC):
  def _dataclass_fields(self) -> list[Field]:
  return list(fields(self))

- def as_verified_rules(self) -> T_BaseRules:
+ def as_verified_data_model(self) -> T_BaseRules:
  cls_ = self._get_verified_cls()
  return cls_.model_validate(self.dump())
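
For context on how this rename surfaces at the importer level, a minimal sketch assuming ExcelImporter is still re-exported from the importers package; the filename is hypothetical.

    from pathlib import Path

    from cognite.neat.core._data_model.importers import ExcelImporter

    # 0.121.2 renames the importer entry point: to_rules() becomes to_data_model().
    # It returns an ImportedDataModel wrapping the unverified model plus the per-sheet
    # read info; the unverified model is what exposes as_verified_data_model()
    # (previously as_verified_rules()).
    imported = ExcelImporter(Path("my_data_model.xlsx")).to_data_model()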
 
@@ -30,7 +30,7 @@ from cognite.neat.core._constants import DEFAULT_NAMESPACE
  from cognite.neat.core._data_model.models._types import (
  ContainerEntityType,
  DataModelExternalIdType,
- DmsPropertyType,
+ PhysicalPropertyType,
  SpaceType,
  StrListType,
  URIRefType,
@@ -438,7 +438,7 @@ ExtensionCategoryType = Annotated[
  # Immutable such that this can be used as a key in a dictionary
  class ContainerProperty(BaseModel, frozen=True):
  container: ContainerEntityType
- property_: DmsPropertyType
+ property_: PhysicalPropertyType


  class ContainerDestinationProperty(ContainerProperty, frozen=True):
@@ -451,4 +451,4 @@ class ViewRef(BaseModel, frozen=True):


  class ViewProperty(ViewRef, frozen=True):
- property_: DmsPropertyType
+ property_: PhysicalPropertyType
@@ -23,14 +23,14 @@ from cognite.neat.core._data_model._constants import (
  )
  from cognite.neat.core._data_model.models.entities._multi_value import MultiValueTypeInfo
  from cognite.neat.core._data_model.models.entities._single_value import (
- ClassEntity,
+ ConceptEntity,
  ContainerEntity,
  ViewEntity,
  )
  from cognite.neat.core._issues.errors import RegexViolationError
  from cognite.neat.core._issues.warnings import RegexViolationWarning

- Entities: TypeAlias = ClassEntity | ViewEntity | ContainerEntity
+ Entities: TypeAlias = ConceptEntity | ViewEntity | ContainerEntity
  T_Entities = TypeVar("T_Entities", bound=Entities)


@@ -139,9 +139,9 @@ ConceptualPropertyType = Annotated[
  str,
  AfterValidator(_external_id_validation_factory(EntityTypes.conceptual_property, "Property column in properties")),
  ]
- DmsPropertyType = Annotated[
+ PhysicalPropertyType = Annotated[
  str,
- AfterValidator(_external_id_validation_factory(EntityTypes.dms_property, "Property column in properties")),
+ AfterValidator(_external_id_validation_factory(EntityTypes.physical_property, "Property column in properties")),
  ]


@@ -152,7 +152,7 @@ def _entity_validation(value: Entities, location: str) -> Entities:
  return value


- ClassEntityType = Annotated[ClassEntity, AfterValidator(lambda v: _entity_validation(v, "the Class column"))]
+ ConceptEntityType = Annotated[ConceptEntity, AfterValidator(lambda v: _entity_validation(v, "the Class column"))]
  ViewEntityType = Annotated[ViewEntity, AfterValidator(lambda v: _entity_validation(v, "the View column"))]
  ContainerEntityType = Annotated[
  ContainerEntity, AfterValidator(lambda v: _entity_validation(v, "the Container column"))
@@ -161,7 +161,7 @@ ContainerEntityType = Annotated[


  def _multi_value_type_validation(value: MultiValueTypeInfo, location: str) -> MultiValueTypeInfo:
  for type_ in value.types:
- if isinstance(type_, ClassEntity):
+ if isinstance(type_, ConceptEntity):
  _entity_validation(type_, location)