cognite-neat 0.102.0__py3-none-any.whl → 0.103.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.

Potentially problematic release: this version of cognite-neat might be problematic.

Files changed (66)
  1. cognite/neat/__init__.py +1 -1
  2. cognite/neat/_app/api/routers/crud.py +1 -1
  3. cognite/neat/_client/__init__.py +1 -1
  4. cognite/neat/_client/_api/data_modeling_loaders.py +1 -1
  5. cognite/neat/_client/_api/schema.py +1 -1
  6. cognite/neat/_graph/_tracking/__init__.py +1 -1
  7. cognite/neat/_graph/extractors/__init__.py +8 -8
  8. cognite/neat/_graph/extractors/_mock_graph_generator.py +2 -3
  9. cognite/neat/_graph/loaders/_base.py +1 -1
  10. cognite/neat/_graph/loaders/_rdf2dms.py +165 -47
  11. cognite/neat/_graph/transformers/__init__.py +13 -9
  12. cognite/neat/_graph/transformers/_value_type.py +196 -2
  13. cognite/neat/_issues/__init__.py +6 -6
  14. cognite/neat/_issues/_base.py +4 -4
  15. cognite/neat/_issues/errors/__init__.py +22 -22
  16. cognite/neat/_issues/formatters.py +1 -1
  17. cognite/neat/_issues/warnings/__init__.py +20 -18
  18. cognite/neat/_issues/warnings/_properties.py +7 -0
  19. cognite/neat/_issues/warnings/user_modeling.py +2 -2
  20. cognite/neat/_rules/analysis/__init__.py +1 -1
  21. cognite/neat/_rules/catalog/__init__.py +1 -0
  22. cognite/neat/_rules/catalog/hello_world_pump.xlsx +0 -0
  23. cognite/neat/_rules/exporters/__init__.py +5 -5
  24. cognite/neat/_rules/exporters/_rules2excel.py +5 -4
  25. cognite/neat/_rules/importers/__init__.py +4 -4
  26. cognite/neat/_rules/importers/_base.py +7 -3
  27. cognite/neat/_rules/importers/_rdf/__init__.py +1 -1
  28. cognite/neat/_rules/models/__init__.py +5 -5
  29. cognite/neat/_rules/models/_base_rules.py +1 -1
  30. cognite/neat/_rules/models/dms/__init__.py +11 -11
  31. cognite/neat/_rules/models/dms/_validation.py +18 -10
  32. cognite/neat/_rules/models/entities/__init__.py +26 -26
  33. cognite/neat/_rules/models/entities/_single_value.py +25 -5
  34. cognite/neat/_rules/models/information/__init__.py +5 -5
  35. cognite/neat/_rules/models/mapping/_classic2core.yaml +54 -8
  36. cognite/neat/_rules/transformers/__init__.py +12 -12
  37. cognite/neat/_rules/transformers/_pipelines.py +10 -5
  38. cognite/neat/_session/_base.py +71 -0
  39. cognite/neat/_session/_collector.py +3 -1
  40. cognite/neat/_session/_drop.py +10 -0
  41. cognite/neat/_session/_inspect.py +35 -1
  42. cognite/neat/_session/_mapping.py +5 -0
  43. cognite/neat/_session/_prepare.py +121 -15
  44. cognite/neat/_session/_read.py +180 -20
  45. cognite/neat/_session/_set.py +11 -1
  46. cognite/neat/_session/_show.py +50 -11
  47. cognite/neat/_session/_to.py +58 -10
  48. cognite/neat/_session/engine/__init__.py +1 -1
  49. cognite/neat/_store/__init__.py +3 -2
  50. cognite/neat/_store/{_base.py → _graph_store.py} +33 -0
  51. cognite/neat/_store/_provenance.py +11 -1
  52. cognite/neat/_store/_rules_store.py +20 -0
  53. cognite/neat/_utils/auth.py +1 -1
  54. cognite/neat/_utils/io_.py +11 -0
  55. cognite/neat/_utils/reader/__init__.py +1 -1
  56. cognite/neat/_version.py +2 -2
  57. cognite/neat/_workflows/__init__.py +3 -3
  58. cognite/neat/_workflows/steps/lib/current/graph_extractor.py +1 -1
  59. cognite/neat/_workflows/steps/lib/current/rules_exporter.py +1 -1
  60. cognite/neat/_workflows/steps/lib/current/rules_importer.py +2 -2
  61. cognite/neat/_workflows/steps/lib/io/io_steps.py +3 -3
  62. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.1.dist-info}/METADATA +1 -1
  63. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.1.dist-info}/RECORD +66 -63
  64. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.1.dist-info}/LICENSE +0 -0
  65. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.1.dist-info}/WHEEL +0 -0
  66. {cognite_neat-0.102.0.dist-info → cognite_neat-0.103.1.dist-info}/entry_points.txt +0 -0

cognite/neat/_graph/transformers/_value_type.py

@@ -1,8 +1,17 @@
- from rdflib import XSD, Graph, URIRef
+ import warnings
+ from collections.abc import Callable
+ from typing import Any, cast
+ from urllib.parse import quote
+
+ import rdflib
+ from rdflib import RDF, XSD, Graph, Namespace, URIRef

  from cognite.neat._constants import UNKNOWN_TYPE
  from cognite.neat._graph.queries import Queries
- from cognite.neat._utils.rdf_ import remove_namespace_from_uri
+ from cognite.neat._issues.warnings import NeatValueWarning, PropertyDataTypeConversionWarning
+ from cognite.neat._utils.auxiliary import string_to_ideal_type
+ from cognite.neat._utils.collection_ import iterate_progress_bar
+ from cognite.neat._utils.rdf_ import get_namespace, remove_namespace_from_uri

  from ._base import BaseTransformer

@@ -64,3 +73,188 @@ class SplitMultiValueProperty(BaseTransformer):
                  graph.remove((s, property_uri, o))
                  new_property = URIRef(f"{property_uri}_{remove_namespace_from_uri(value_type_uri)}")
                  graph.add((s, new_property, o))
+
+
+ class ConvertLiteral(BaseTransformer):
+     description: str = "ConvertLiteral is a transformer that improve data typing of a literal value."
+     _use_only_once: bool = False
+     _need_changes = frozenset({})
+
+     _count_by_properties = """SELECT (COUNT(?value) AS ?valueCount)
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?value
+             FILTER(isLiteral(?value))
+         }}"""
+
+     _count_by_properties_uri = """SELECT (COUNT(?value) AS ?valueCount)
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?value
+             FILTER(isIRI(?value))
+         }}"""
+
+     _properties = """SELECT ?instance ?value
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?value
+
+             FILTER(isLiteral(?value))
+
+         }}"""
+
+     def __init__(
+         self,
+         subject_type: URIRef,
+         subject_predicate: URIRef,
+         conversion: Callable[[Any], Any] | None = None,
+     ) -> None:
+         self.subject_type = subject_type
+         self.subject_predicate = subject_predicate
+         self.conversion = conversion or string_to_ideal_type
+         self._type_name = remove_namespace_from_uri(subject_type)
+         self._property_name = remove_namespace_from_uri(subject_predicate)
+
+     def transform(self, graph: Graph) -> None:
+         count_connection_query = self._count_by_properties_uri.format(
+             subject_type=self.subject_type, subject_predicate=self.subject_predicate
+         )
+         connection_count_res = list(graph.query(count_connection_query))
+         connection_count = int(connection_count_res[0][0])  # type: ignore [index, arg-type]
+
+         if connection_count > 0:
+             warnings.warn(
+                 NeatValueWarning(
+                     f"Skipping {connection_count} of {self._type_name}.{self._property_name} "
+                     f"as these are connections and not data values."
+                 ),
+                 stacklevel=2,
+             )
+
+         count_query = self._count_by_properties.format(
+             subject_type=self.subject_type, subject_predicate=self.subject_predicate
+         )
+
+         property_count_res = list(graph.query(count_query))
+         property_count = int(property_count_res[0][0])  # type: ignore [index, arg-type]
+         iterate_query = self._properties.format(
+             subject_type=self.subject_type, subject_predicate=self.subject_predicate
+         )
+
+         for instance, literal in iterate_progress_bar(  # type: ignore[misc]
+             graph.query(iterate_query),
+             total=property_count,
+             description=f"Converting {self._type_name}.{self._property_name}.",
+         ):
+             value = cast(rdflib.Literal, literal).toPython()
+             try:
+                 converted_value = self.conversion(value)
+             except Exception as e:
+                 warnings.warn(
+                     PropertyDataTypeConversionWarning(str(instance), self._type_name, self._property_name, str(e)),
+                     stacklevel=2,
+                 )
+                 continue
+
+             graph.add((instance, self.subject_predicate, rdflib.Literal(converted_value)))
+             graph.remove((instance, self.subject_predicate, literal))
+
+
+ class LiteralToEntity(BaseTransformer):
+     description = "Converts a literal value to new entity"
+
+     _count_properties_of_type = """SELECT (COUNT(?property) AS ?propertyCount)
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?property
+             FILTER(isLiteral(?property))
+         }}"""
+     _count_connections_of_type = """SELECT (COUNT(?property) AS ?propertyCount)
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?property
+             FILTER(isIRI(?property))
+         }}"""
+
+     _properties_of_type = """SELECT ?instance ?property
+         WHERE {{
+             ?instance a <{subject_type}> .
+             ?instance <{subject_predicate}> ?property
+             FILTER(isLiteral(?property))
+         }}"""
+
+     _count_properties = """SELECT (COUNT(?property) AS ?propertyCount)
+         WHERE {{
+             ?instance <{subject_predicate}> ?property
+             FILTER(isLiteral(?property))
+         }}"""
+     _count_connections = """SELECT (COUNT(?property) AS ?propertyCount)
+         WHERE {{
+             ?instance <{subject_predicate}> ?property
+             FILTER(isIRI(?property))
+         }}"""
+     _properties = """SELECT ?instance ?property
+         WHERE {{
+             ?instance <{subject_predicate}> ?property
+             FILTER(isLiteral(?property))
+         }}"""
+
+     def __init__(
+         self, subject_type: URIRef | None, subject_predicate: URIRef, entity_type: str, new_property: str | None = None
+     ) -> None:
+         self.subject_type = subject_type
+         self.subject_predicate = subject_predicate
+         self.entity_type = entity_type
+         self.new_property = new_property
+
+     def transform(self, graph: Graph) -> None:
+         if self.subject_type is None:
+             count_query = self._count_properties.format(subject_predicate=self.subject_predicate)
+             iterate_query = self._properties.format(subject_predicate=self.subject_predicate)
+             connection_count_query = self._count_connections.format(subject_predicate=self.subject_predicate)
+         else:
+             count_query = self._count_properties_of_type.format(
+                 subject_type=self.subject_type, subject_predicate=self.subject_predicate
+             )
+             iterate_query = self._properties_of_type.format(
+                 subject_type=self.subject_type, subject_predicate=self.subject_predicate
+             )
+             connection_count_query = self._count_connections_of_type.format(
+                 subject_type=self.subject_type, subject_predicate=self.subject_predicate
+             )
+
+         connection_count_res = list(graph.query(connection_count_query))
+         connection_count = int(connection_count_res[0][0])  # type: ignore [index, arg-type]
+         if connection_count > 0:
+             warnings.warn(
+                 NeatValueWarning(
+                     f"Skipping {connection_count} of {remove_namespace_from_uri(self.subject_predicate)} "
+                     f"as these are connections and not data values."
+                 ),
+                 stacklevel=2,
+             )
+
+         property_count_res = list(graph.query(count_query))
+         property_count = int(property_count_res[0][0])  # type: ignore [index, arg-type]
+
+         instance: URIRef
+         description = f"Creating {remove_namespace_from_uri(self.subject_predicate)}."
+         if self.subject_type is not None:
+             description = (
+                 f"Creating {remove_namespace_from_uri(self.subject_type)}."
+                 f"{remove_namespace_from_uri(self.subject_predicate)}."
+             )
+         for instance, literal in iterate_progress_bar(  # type: ignore[misc, assignment]
+             graph.query(iterate_query),
+             total=property_count,
+             description=description,
+         ):
+             value = cast(rdflib.Literal, literal).toPython()
+             namespace = Namespace(get_namespace(instance))
+             entity_type = namespace[self.entity_type]
+             new_entity = namespace[f"{self.entity_type}_{quote(value)!s}"]
+             graph.add((new_entity, RDF.type, entity_type))
+             if self.new_property is not None:
+                 graph.add((new_entity, namespace[self.new_property], rdflib.Literal(value)))
+             graph.add((instance, self.subject_predicate, new_entity))
+             graph.remove((instance, self.subject_predicate, literal))
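
The two transformers added above operate on a plain rdflib Graph: ConvertLiteral re-types literal values in place, and LiteralToEntity promotes a literal into a new typed node. The sketch below shows one way they might be called; the example namespace, instance, and property URIs are invented for illustration, the import path mirrors the module shown in this diff, and string_to_ideal_type is the default conversion per the code above.

from rdflib import RDF, Graph, Literal, Namespace

from cognite.neat._graph.transformers._value_type import ConvertLiteral, LiteralToEntity

EX = Namespace("http://example.org/")  # made-up namespace for the sketch

graph = Graph()
graph.add((EX["pump1"], RDF.type, EX["Pump"]))
graph.add((EX["pump1"], EX["ratedPower"], Literal("42.5")))    # string that should become a float
graph.add((EX["pump1"], EX["manufacturer"], Literal("ACME")))  # literal that should become an entity

# Re-type the literal in place; string_to_ideal_type is the default conversion.
ConvertLiteral(subject_type=EX["Pump"], subject_predicate=EX["ratedPower"]).transform(graph)

# Promote the literal to a Manufacturer node, keep the original value on a "name" property,
# and point pump1 at the new node instead of the literal.
LiteralToEntity(
    subject_type=EX["Pump"],
    subject_predicate=EX["manufacturer"],
    entity_type="Manufacturer",
    new_property="name",
).transform(graph)

Both transformers warn and skip instead of raising when a value turns out to be a connection (an IRI) or cannot be converted, so one bad value does not abort the whole rewrite.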

cognite/neat/_issues/__init__.py

@@ -15,14 +15,14 @@ from ._base import (
  )

  __all__ = [
-     "NeatIssue",
-     "NeatError",
-     "NeatWarning",
      "DefaultWarning",
-     "NeatIssueList",
+     "FutureResult",
      "IssueList",
      "MultiValueError",
-     "catch_warnings",
+     "NeatError",
+     "NeatIssue",
+     "NeatIssueList",
+     "NeatWarning",
      "catch_issues",
-     "FutureResult",
+     "catch_warnings",
  ]

cognite/neat/_issues/_base.py

@@ -32,12 +32,12 @@ else:


  __all__ = [
-     "NeatIssue",
-     "NeatError",
-     "NeatWarning",
      "DefaultWarning",
-     "NeatIssueList",
      "MultiValueError",
+     "NeatError",
+     "NeatIssue",
+     "NeatIssueList",
+     "NeatWarning",
  ]

  T_Identifier = TypeVar("T_Identifier", bound=Hashable)

cognite/neat/_issues/errors/__init__.py

@@ -38,40 +38,40 @@ from ._workflow import (
  )

  __all__ = [
+     "AuthorizationError",
+     "CDFMissingClientError",
+     "DefaultPydanticError",
+     "FileMissingRequiredFieldError",
+     "FileNotAFileError",
+     "FileNotFoundNeatError",
+     "FileReadError",
+     "FileTypeUnexpectedError",
      "NeatError",
-     "NeatValueError",
      "NeatImportError",
-     "RegexViolationError",
-     "AuthorizationError",
+     "NeatTypeError",
+     "NeatValueError",
      "NeatYamlError",
-     "FileReadError",
-     "ResourceCreationError",
-     "FileNotFoundNeatError",
-     "FileMissingRequiredFieldError",
+     "PropertyDefinitionDuplicatedError",
      "PropertyDefinitionError",
-     "PropertyTypeNotSupportedError",
+     "PropertyMappingDuplicatedError",
      "PropertyNotFoundError",
-     "PropertyDefinitionDuplicatedError",
+     "PropertyTypeNotSupportedError",
+     "RegexViolationError",
      "ResourceChangedError",
+     "ResourceConversionError",
+     "ResourceCreationError",
      "ResourceDuplicatedError",
-     "ResourceRetrievalError",
-     "ResourceNotFoundError",
      "ResourceError",
-     "ResourceNotDefinedError",
      "ResourceMissingIdentifierError",
-     "ResourceConversionError",
-     "WorkflowConfigurationNotSetError",
+     "ResourceNotDefinedError",
+     "ResourceNotFoundError",
+     "ResourceRetrievalError",
+     "ReversedConnectionNotFeasibleError",
+     "RowError",
      "WorkFlowMissingDataError",
+     "WorkflowConfigurationNotSetError",
      "WorkflowStepNotInitializedError",
      "WorkflowStepOutputError",
-     "FileTypeUnexpectedError",
-     "FileNotAFileError",
-     "DefaultPydanticError",
-     "PropertyMappingDuplicatedError",
-     "RowError",
-     "NeatTypeError",
-     "ReversedConnectionNotFeasibleError",
-     "CDFMissingClientError",
  ]

  _NEAT_ERRORS_BY_NAME = {error.__name__: error for error in _get_subclasses(NeatError, include_base=True)}

cognite/neat/_issues/formatters.py

@@ -5,7 +5,7 @@ from pathlib import Path

  from ._base import NeatError, NeatIssueList, NeatWarning

- __all__ = ["Formatter", "BasicHTML", "FORMATTER_BY_NAME"]
+ __all__ = ["FORMATTER_BY_NAME", "BasicHTML", "Formatter"]


  class Formatter(ABC):

cognite/neat/_issues/warnings/__init__.py

@@ -26,6 +26,7 @@ from ._models import (
      UserModelingWarning,
  )
  from ._properties import (
+     PropertyDataTypeConversionWarning,
      PropertyDefinitionDuplicatedWarning,
      PropertyNotFoundWarning,
      PropertyOverwritingWarning,
@@ -43,38 +44,39 @@ from ._resources import (
  )

  __all__ = [
+     "BreakingModelingPrincipleWarning",
+     "CDFAuthWarning",
+     "CDFMaxIterationsWarning",
+     "CDFNotSupportedWarning",
      "DefaultWarning",
-     "FileReadWarning",
-     "FileMissingRequiredFieldWarning",
      "FileItemNotSupportedWarning",
+     "FileMissingRequiredFieldWarning",
+     "FileReadWarning",
      "FileTypeUnexpectedWarning",
      "NeatValueWarning",
+     "NotSupportedHasDataFilterLimitWarning",
+     "NotSupportedViewContainerLimitWarning",
      "NotSupportedWarning",
-     "UserModelingWarning",
-     "CDFNotSupportedWarning",
-     "BreakingModelingPrincipleWarning",
+     "PrincipleMatchingSpaceAndVersionWarning",
+     "PrincipleOneModelOneSpaceWarning",
+     "PrincipleSolutionBuildsOnEnterpriseWarning",
+     "PropertyDataTypeConversionWarning",
      "PropertyDefinitionDuplicatedWarning",
-     "PropertyTypeNotSupportedWarning",
      "PropertyNotFoundWarning",
-     "PropertyValueTypeUndefinedWarning",
      "PropertyOverwritingWarning",
      "PropertySkippedWarning",
-     "ResourceNeatWarning",
-     "ResourcesDuplicatedWarning",
+     "PropertyTypeNotSupportedWarning",
+     "PropertyValueTypeUndefinedWarning",
      "RegexViolationWarning",
+     "ResourceNeatWarning",
      "ResourceNotFoundWarning",
-     "ResourceTypeNotSupportedWarning",
-     "ResourceRetrievalWarning",
      "ResourceRegexViolationWarning",
-     "PrincipleOneModelOneSpaceWarning",
-     "PrincipleMatchingSpaceAndVersionWarning",
-     "PrincipleSolutionBuildsOnEnterpriseWarning",
-     "NotSupportedViewContainerLimitWarning",
-     "NotSupportedHasDataFilterLimitWarning",
+     "ResourceRetrievalWarning",
+     "ResourceTypeNotSupportedWarning",
+     "ResourcesDuplicatedWarning",
      "UndefinedViewWarning",
-     "CDFAuthWarning",
+     "UserModelingWarning",
      "user_modeling",
-     "CDFMaxIterationsWarning",
  ]

  _NEAT_WARNINGS_BY_NAME = {warning.__name__: warning for warning in _get_subclasses(NeatWarning, include_base=True)}

cognite/neat/_issues/warnings/_properties.py

@@ -70,3 +70,10 @@ class PropertySkippedWarning(PropertyWarning[T_Identifier]):
      which is skipped. {reason}."""

      reason: str
+
+
+ @dataclass(unsafe_hash=True)
+ class PropertyDataTypeConversionWarning(PropertyWarning[T_Identifier]):
+     """The {resource_type} with identifier {identifier} failed to convert the property {property_name}: {error}"""
+
+     error: str
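
PropertyDataTypeConversionWarning is the issue ConvertLiteral emits when a value fails conversion (see the transformer hunk above). Below is a small sketch of raising and capturing it with the standard warnings machinery; the identifier, type, and property values are made up, and the positional field order simply mirrors the call site in the transformer diff.

import warnings

from cognite.neat._issues.warnings import PropertyDataTypeConversionWarning

# Positional order copied from the ConvertLiteral call site above:
# identifier, resource type, property name, conversion error text.
issue = PropertyDataTypeConversionWarning("http://example.org/pump1", "Pump", "ratedPower", "could not cast to float")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    warnings.warn(issue, stacklevel=2)

print(type(caught[0].message).__name__)  # PropertyDataTypeConversionWarning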

cognite/neat/_issues/warnings/user_modeling.py

@@ -12,12 +12,12 @@ from cognite.neat._constants import DMS_CONTAINER_PROPERTY_SIZE_LIMIT
  from ._models import UserModelingWarning

  __all__ = [
+     "ContainerPropertyLimitWarning",
      "DirectRelationMissingSourceWarning",
      "EmptyContainerWarning",
      "HasDataFilterOnNoPropertiesViewWarning",
-     "NodeTypeFilterOnParentViewWarning",
      "HasDataFilterOnViewWithReferencesWarning",
-     "ContainerPropertyLimitWarning",
+     "NodeTypeFilterOnParentViewWarning",
      "NotNeatSupportedFilterWarning",
      "ParentInDifferentSpaceWarning",
  ]

cognite/neat/_rules/analysis/__init__.py

@@ -1,4 +1,4 @@
  from ._dms import DMSAnalysis
  from ._information import InformationAnalysis

- __all__ = ["InformationAnalysis", "DMSAnalysis"]
+ __all__ = ["DMSAnalysis", "InformationAnalysis"]

cognite/neat/_rules/catalog/__init__.py

@@ -4,3 +4,4 @@ from pathlib import Path

  _CATALOG = Path(__file__).parent
  imf_attributes = _CATALOG / "info-rules-imf.xlsx"
+ hello_world_pump = _CATALOG / "hello_world_pump.xlsx"
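
The new hello_world_pump entry is a pathlib.Path to a spreadsheet bundled with the package, so it can be handed to the Excel importer listed elsewhere in this release. A rough sketch, assuming ExcelImporter accepts a file path and exposes the to_rules() entry point of the importer base class:

from cognite.neat._rules.catalog import hello_world_pump
from cognite.neat._rules.importers import ExcelImporter

# hello_world_pump is a Path to the bundled example data model spreadsheet.
importer = ExcelImporter(hello_world_pump)
read_rules = importer.to_rules()  # assumed entry point; returns the unverified input rules plus any issues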

cognite/neat/_rules/exporters/__init__.py

@@ -7,15 +7,15 @@ from ._rules2yaml import YAMLExporter

  __all__ = [
      "BaseExporter",
-     "DMSExporter",
      "CDFExporter",
-     "SemanticDataModelExporter",
-     "OWLExporter",
+     "DMSExporter",
+     "ExcelExporter",
      "GraphExporter",
+     "InstanceTemplateExporter",
+     "OWLExporter",
      "SHACLExporter",
-     "ExcelExporter",
+     "SemanticDataModelExporter",
      "YAMLExporter",
-     "InstanceTemplateExporter",
  ]


cognite/neat/_rules/exporters/_rules2excel.py

@@ -117,7 +117,7 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):

          main_header = self._main_header_by_sheet_name[sheet_name]
          sheet.append([main_header] + [""] * (len(headers) - 1))
-         sheet.merge_cells(start_row=1, start_column=1, end_row=1, end_column=len(headers))
+
          if headers[0] == "Neat ID":
              # Move the Neat ID to the end of the columns
              headers = headers[1:] + ["Neat ID"]
@@ -157,12 +157,13 @@ class ExcelExporter(BaseExporter[VerifiedRules, Workbook]):
          # This freezes all rows above the given row
          sheet.freeze_panes = sheet["A3"]

-         sheet["A1"].alignment = Alignment(horizontal="center")
+         sheet["A1"].alignment = Alignment(horizontal="left")

          if self._styling_level > 1:
              # Make the header row bold, larger, and colored
-             sheet["A1"].font = Font(bold=True, size=20)
-             sheet["A1"].fill = PatternFill(fgColor="FFC000", patternType="solid")
+             for cell, *_ in sheet.iter_cols(min_row=1, max_row=1, min_col=1, max_col=len(headers)):
+                 cell.font = Font(bold=True, size=20)
+                 cell.fill = PatternFill(fgColor="FFC000", patternType="solid")
              for cell in sheet["2"]:
                  cell.font = Font(bold=True, size=14)


cognite/neat/_rules/importers/__init__.py

@@ -7,14 +7,14 @@ from ._yaml2rules import YAMLImporter

  __all__ = [
      "BaseImporter",
-     "OWLImporter",
-     "IMFImporter",
      "DMSImporter",
+     "DTDLImporter",
      "ExcelImporter",
      "GoogleSheetImporter",
-     "DTDLImporter",
-     "YAMLImporter",
+     "IMFImporter",
      "InferenceImporter",
+     "OWLImporter",
+     "YAMLImporter",
  ]

  RulesImporters = (

cognite/neat/_rules/importers/_base.py

@@ -3,16 +3,18 @@ from abc import ABC, abstractmethod
  from collections.abc import Iterator
  from contextlib import contextmanager, suppress
  from datetime import datetime
- from typing import Any, Generic, Literal
+ from typing import TYPE_CHECKING, Any, Generic, Literal

  from pydantic import ValidationError

  from cognite.neat._constants import DEFAULT_NAMESPACE
  from cognite.neat._issues import IssueList, NeatError, NeatWarning
  from cognite.neat._rules._shared import ReadRules, T_InputRules
- from cognite.neat._store._provenance import Agent as ProvenanceAgent
  from cognite.neat._utils.auxiliary import class_html_doc

+ if TYPE_CHECKING:
+     from cognite.neat._store._provenance import Agent as ProvenanceAgent
+

  class BaseImporter(ABC, Generic[T_InputRules]):
      """
@@ -48,8 +50,10 @@ class BaseImporter(ABC, Generic[T_InputRules]):
          return class_html_doc(cls)

      @property
-     def agent(self) -> ProvenanceAgent:
+     def agent(self) -> "ProvenanceAgent":
          """Provenance agent for the importer."""
+         from cognite.neat._store._provenance import Agent as ProvenanceAgent
+
          return ProvenanceAgent(id_=DEFAULT_NAMESPACE[f"agent/{type(self).__name__}"])


cognite/neat/_rules/importers/_rdf/__init__.py

@@ -2,4 +2,4 @@ from ._imf2rules import IMFImporter
  from ._inference2rules import InferenceImporter
  from ._owl2rules import OWLImporter

- __all__ = ["IMFImporter", "OWLImporter", "InferenceImporter"]
+ __all__ = ["IMFImporter", "InferenceImporter", "OWLImporter"]

cognite/neat/_rules/models/__init__.py

@@ -17,16 +17,16 @@ VERIFIED_RULES_BY_ROLE: dict[RoleTypes, type[InformationRules] | type[DMSRules]]


  __all__ = [
+     "INPUT_RULES_BY_ROLE",
      "DMSInputRules",
-     "InformationInputRules",
-     "InformationRules",
      "DMSRules",
-     "INPUT_RULES_BY_ROLE",
      "DMSSchema",
+     "DataModelType",
+     "ExtensionCategory",
+     "InformationInputRules",
+     "InformationRules",
      "RoleTypes",
      "SchemaCompleteness",
-     "ExtensionCategory",
-     "DataModelType",
      "SheetList",
      "SheetRow",
  ]

cognite/neat/_rules/models/_base_rules.py

@@ -149,7 +149,7 @@ class BaseMetadata(SchemaModel):

      role: ClassVar[RoleTypes] = Field(description="Role of the person creating the data model")
      aspect: ClassVar[DataModelAspect] = Field(description="Aspect of the data model")
-     space: SpaceType = Field(alias="prefix", description="The space where the data model is defined")
+     space: SpaceType = Field(description="The space where the data model is defined")
      external_id: DataModelExternalIdType = Field(
          alias="externalId", description="External identifier for the data model"
      )

cognite/neat/_rules/models/dms/__init__.py

@@ -13,20 +13,20 @@ from ._rules_input import (
  )
  from ._validation import DMSValidation
  __all__ = [
-     "DMSRules",
-     "DMSSchema",
-     "DMSMetadata",
-     "DMSView",
-     "DMSProperty",
      "DMSContainer",
-     "DMSNode",
      "DMSEnum",
-     "DMSInputRules",
-     "DMSInputMetadata",
-     "DMSInputView",
-     "DMSInputProperty",
      "DMSInputContainer",
-     "DMSInputNode",
      "DMSInputEnum",
+     "DMSInputMetadata",
+     "DMSInputNode",
+     "DMSInputProperty",
+     "DMSInputRules",
+     "DMSInputView",
+     "DMSMetadata",
+     "DMSNode",
+     "DMSProperty",
+     "DMSRules",
+     "DMSSchema",
      "DMSValidation",
+     "DMSView",
  ]