cognite-neat 0.125.1__py3-none-any.whl → 0.126.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cognite-neat may be problematic. See the package registry's advisory page for more details.
- cognite/neat/_client/__init__.py +4 -0
- cognite/neat/_client/api.py +8 -0
- cognite/neat/_client/client.py +19 -0
- cognite/neat/_client/config.py +40 -0
- cognite/neat/_client/containers_api.py +73 -0
- cognite/neat/_client/data_classes.py +10 -0
- cognite/neat/_client/data_model_api.py +63 -0
- cognite/neat/_client/spaces_api.py +67 -0
- cognite/neat/_client/views_api.py +82 -0
- cognite/neat/_data_model/_analysis.py +127 -0
- cognite/neat/_data_model/_constants.py +59 -0
- cognite/neat/_data_model/_shared.py +46 -0
- cognite/neat/_data_model/deployer/__init__.py +0 -0
- cognite/neat/_data_model/deployer/_differ.py +113 -0
- cognite/neat/_data_model/deployer/_differ_container.py +354 -0
- cognite/neat/_data_model/deployer/_differ_data_model.py +29 -0
- cognite/neat/_data_model/deployer/_differ_space.py +9 -0
- cognite/neat/_data_model/deployer/_differ_view.py +194 -0
- cognite/neat/_data_model/deployer/data_classes.py +176 -0
- cognite/neat/_data_model/exporters/__init__.py +4 -0
- cognite/neat/_data_model/exporters/_base.py +22 -0
- cognite/neat/_data_model/exporters/_table_exporter/__init__.py +0 -0
- cognite/neat/_data_model/exporters/_table_exporter/exporter.py +106 -0
- cognite/neat/_data_model/exporters/_table_exporter/workbook.py +414 -0
- cognite/neat/_data_model/exporters/_table_exporter/writer.py +391 -0
- cognite/neat/_data_model/importers/__init__.py +2 -1
- cognite/neat/_data_model/importers/_api_importer.py +88 -0
- cognite/neat/_data_model/importers/_table_importer/data_classes.py +48 -8
- cognite/neat/_data_model/importers/_table_importer/importer.py +102 -6
- cognite/neat/_data_model/importers/_table_importer/reader.py +860 -0
- cognite/neat/_data_model/models/dms/__init__.py +19 -1
- cognite/neat/_data_model/models/dms/_base.py +12 -8
- cognite/neat/_data_model/models/dms/_constants.py +1 -1
- cognite/neat/_data_model/models/dms/_constraints.py +2 -1
- cognite/neat/_data_model/models/dms/_container.py +5 -5
- cognite/neat/_data_model/models/dms/_data_model.py +3 -3
- cognite/neat/_data_model/models/dms/_data_types.py +8 -1
- cognite/neat/_data_model/models/dms/_http.py +18 -0
- cognite/neat/_data_model/models/dms/_indexes.py +2 -1
- cognite/neat/_data_model/models/dms/_references.py +17 -4
- cognite/neat/_data_model/models/dms/_space.py +11 -7
- cognite/neat/_data_model/models/dms/_view_property.py +7 -4
- cognite/neat/_data_model/models/dms/_views.py +16 -6
- cognite/neat/_data_model/validation/__init__.py +0 -0
- cognite/neat/_data_model/validation/_base.py +16 -0
- cognite/neat/_data_model/validation/dms/__init__.py +9 -0
- cognite/neat/_data_model/validation/dms/_orchestrator.py +68 -0
- cognite/neat/_data_model/validation/dms/_validators.py +139 -0
- cognite/neat/_exceptions.py +15 -3
- cognite/neat/_issues.py +39 -6
- cognite/neat/_session/__init__.py +3 -0
- cognite/neat/_session/_physical.py +88 -0
- cognite/neat/_session/_session.py +34 -25
- cognite/neat/_session/_wrappers.py +61 -0
- cognite/neat/_state_machine/__init__.py +10 -0
- cognite/neat/{_session/_state_machine → _state_machine}/_base.py +11 -1
- cognite/neat/_state_machine/_states.py +53 -0
- cognite/neat/_store/__init__.py +3 -0
- cognite/neat/_store/_provenance.py +55 -0
- cognite/neat/_store/_store.py +124 -0
- cognite/neat/_utils/_reader.py +194 -0
- cognite/neat/_utils/http_client/__init__.py +14 -20
- cognite/neat/_utils/http_client/_client.py +22 -61
- cognite/neat/_utils/http_client/_data_classes.py +167 -268
- cognite/neat/_utils/text.py +6 -0
- cognite/neat/_utils/useful_types.py +26 -2
- cognite/neat/_version.py +1 -1
- cognite/neat/v0/core/_data_model/importers/_rdf/_shared.py +2 -2
- cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py +2 -2
- cognite/neat/v0/core/_data_model/models/entities/_single_value.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_unverified.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_validation.py +2 -2
- cognite/neat/v0/core/_data_model/models/physical/_verified.py +3 -3
- cognite/neat/v0/core/_data_model/transformers/_converters.py +1 -1
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/METADATA +1 -1
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/RECORD +78 -40
- cognite/neat/_session/_state_machine/__init__.py +0 -23
- cognite/neat/_session/_state_machine/_states.py +0 -150
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,860 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from collections import defaultdict
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from typing import Any, Literal, TypeVar, cast, overload
|
|
5
|
+
|
|
6
|
+
from pydantic import BaseModel, TypeAdapter, ValidationError
|
|
7
|
+
|
|
8
|
+
from cognite.neat._data_model.models.dms import (
|
|
9
|
+
Constraint,
|
|
10
|
+
ConstraintAdapter,
|
|
11
|
+
ContainerPropertyDefinition,
|
|
12
|
+
ContainerRequest,
|
|
13
|
+
DataModelRequest,
|
|
14
|
+
Index,
|
|
15
|
+
IndexAdapter,
|
|
16
|
+
NodeReference,
|
|
17
|
+
RequestSchema,
|
|
18
|
+
SpaceRequest,
|
|
19
|
+
UniquenessConstraintDefinition,
|
|
20
|
+
ViewRequest,
|
|
21
|
+
ViewRequestProperty,
|
|
22
|
+
ViewRequestPropertyAdapter,
|
|
23
|
+
)
|
|
24
|
+
from cognite.neat._data_model.models.entities import ParsedEntity, parse_entity
|
|
25
|
+
from cognite.neat._exceptions import DataModelImportException
|
|
26
|
+
from cognite.neat._issues import ModelSyntaxError
|
|
27
|
+
from cognite.neat._utils.text import humanize_collection
|
|
28
|
+
from cognite.neat._utils.validation import humanize_validation_error
|
|
29
|
+
|
|
30
|
+
from .data_classes import DMSContainer, DMSEnum, DMSNode, DMSProperty, DMSView, TableDMS
|
|
31
|
+
from .source import TableSource
|
|
32
|
+
|
|
33
|
+
# Type variable bound to pydantic BaseModel, used by the reader's generic validation helpers.
T_BaseModel = TypeVar("T_BaseModel", bound=BaseModel)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@dataclass
class ReadViewProperty:
    """A single view property read from one row of the properties table."""

    # NOTE(review): populated with the row's *container* property identifier
    # (see _process_view_property) — confirm this is intentional.
    prop_id: str
    # Zero-based row index in the properties table (adjusted for display via TableSource).
    row_no: int
    view_property: ViewRequestProperty
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass
class ReadContainerProperty:
    """A single container property definition read from one row of the properties table."""

    # Container property identifier the row maps to.
    prop_id: str
    # Zero-based row index in the properties table (adjusted for display via TableSource).
    row_no: int
    container_property: ContainerPropertyDefinition
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
@dataclass
class ReadIndex:
    """A single index entry read from the 'Index' column of one properties-table row."""

    # Container property the index applies to.
    prop_id: str
    # Position of the property within a composite index; None when no order was given.
    order: int | None
    # Zero-based row index in the properties table.
    row_no: int
    # Index identifier (the suffix of the parsed index entity).
    index_id: str
    index: Index
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@dataclass
class ReadConstraint:
    """A single constraint entry read from the 'Constraint' column of one properties-table row."""

    # Container property the constraint applies to.
    prop_id: str
    # Position of the property within a composite (uniqueness) constraint; None when not given.
    order: int | None
    # Zero-based row index in the properties table.
    row_no: int
    # Constraint identifier (the suffix of the parsed constraint entity).
    constraint_id: str
    constraint: Constraint
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@dataclass
class ReadProperties:
    """Raw per-row results collected from the properties table, prior to merging.

    Attributes:
        container: Maps (container entity, property identifier) to the list of rows that
            defined that container property. Multiple rows may map to the same key.
        view: Maps (view entity, property identifier) to the list of rows that defined
            that view property. More than one entry per key is a duplication error.
        indices: Maps (container entity, index identifier) to the list of index entries
            read for that index (one per participating property).
        constraints: Maps (container entity, constraint identifier) to the list of
            constraint entries read for that constraint (one per participating property).
    """

    container: dict[tuple[ParsedEntity, str], list[ReadContainerProperty]] = field(
        default_factory=lambda: defaultdict(list)
    )
    view: dict[tuple[ParsedEntity, str], list[ReadViewProperty]] = field(default_factory=lambda: defaultdict(list))
    indices: dict[tuple[ParsedEntity, str], list[ReadIndex]] = field(default_factory=lambda: defaultdict(list))
    constraints: dict[tuple[ParsedEntity, str], list[ReadConstraint]] = field(default_factory=lambda: defaultdict(list))
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
@dataclass
class ProcessedProperties:
    """Merged per-entity results produced from a ReadProperties instance.

    Attributes:
        container: Maps container entity to its property identifier -> definition mapping.
        view: Maps view entity to its property identifier -> definition mapping.
        indices: Maps container entity to its index identifier -> Index mapping.
        constraints: Maps container entity to its constraint identifier -> Constraint mapping.
    """

    container: dict[ParsedEntity, dict[str, ContainerPropertyDefinition]] = field(
        default_factory=lambda: defaultdict(dict)
    )
    view: dict[ParsedEntity, dict[str, ViewRequestProperty]] = field(default_factory=lambda: defaultdict(dict))
    indices: dict[ParsedEntity, dict[str, Index]] = field(default_factory=lambda: defaultdict(dict))
    constraints: dict[ParsedEntity, dict[str, Constraint]] = field(default_factory=lambda: defaultdict(dict))
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DMSTableReader:
|
|
98
|
+
"""Reads a TableDMS object and converts it to a RequestSchema.
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
Args:
|
|
102
|
+
default_space (str): The default space to use when no space is given in an entity.
|
|
103
|
+
default_version (str): The default version to use when no version is given in an entity.
|
|
104
|
+
source (TableSource): The source of the table data, used for error reporting.
|
|
105
|
+
|
|
106
|
+
Raises:
|
|
107
|
+
DataModelImportError: If there are any errors in the data model.
|
|
108
|
+
|
|
109
|
+
Attributes:
|
|
110
|
+
errors (list[ModelSyntaxError]): A list of errors encountered during parsing.
|
|
111
|
+
|
|
112
|
+
Class Attributes:
|
|
113
|
+
Sheets: This is used to create error messages. It ensures that the column names matches
|
|
114
|
+
the names in the table, even if they are renamed in the code.
|
|
115
|
+
PropertyColumns: This is used to create error messages for the properties table.
|
|
116
|
+
It ensures that the column names matches the names in the table, even if they are renamed in the code.
|
|
117
|
+
ContainerColumns: This is used to create error messages for the containers table.
|
|
118
|
+
It ensures that the column names matches the names in the table, even if they are renamed in the code.
|
|
119
|
+
ViewColumns: This is used to create error messages for the views table.
|
|
120
|
+
It ensures that the column names matches the names in the table, even if they are renamed in the code.
|
|
121
|
+
|
|
122
|
+
"""
|
|
123
|
+
|
|
124
|
+
# The following classes are used when creating error messages. They ensure that the column names
|
|
125
|
+
# matches the names in the table, even if they are renamed in the code.
|
|
126
|
+
# Note that this is not a complete list of all columns, only those that are used in error messages.
|
|
127
|
+
class Sheets:
    """User-facing sheet (table) names, resolved from the TableDMS field validation aliases."""

    metadata = cast(str, TableDMS.model_fields["metadata"].validation_alias)
    properties = cast(str, TableDMS.model_fields["properties"].validation_alias)
    containers = cast(str, TableDMS.model_fields["containers"].validation_alias)
    views = cast(str, TableDMS.model_fields["views"].validation_alias)
    nodes = cast(str, TableDMS.model_fields["nodes"].validation_alias)
|
|
133
|
+
|
|
134
|
+
class PropertyColumns:
    """User-facing column names of the properties table, resolved from DMSProperty aliases."""

    view = cast(str, DMSProperty.model_fields["view"].validation_alias)
    view_property = cast(str, DMSProperty.model_fields["view_property"].validation_alias)
    connection = cast(str, DMSProperty.model_fields["connection"].validation_alias)
    value_type = cast(str, DMSProperty.model_fields["value_type"].validation_alias)
    min_count = cast(str, DMSProperty.model_fields["min_count"].validation_alias)
    max_count = cast(str, DMSProperty.model_fields["max_count"].validation_alias)
    default = cast(str, DMSProperty.model_fields["default"].validation_alias)
    auto_increment = cast(str, DMSProperty.model_fields["auto_increment"].validation_alias)
    container = cast(str, DMSProperty.model_fields["container"].validation_alias)
    container_property = cast(str, DMSProperty.model_fields["container_property"].validation_alias)
    container_property_name = cast(str, DMSProperty.model_fields["container_property_name"].validation_alias)
    container_property_description = cast(
        str, DMSProperty.model_fields["container_property_description"].validation_alias
    )
    index = cast(str, DMSProperty.model_fields["index"].validation_alias)
    constraint = cast(str, DMSProperty.model_fields["constraint"].validation_alias)
|
|
151
|
+
|
|
152
|
+
class ContainerColumns:
    """User-facing column names of the containers table, resolved from DMSContainer aliases."""

    container = cast(str, DMSContainer.model_fields["container"].validation_alias)
    constraint = cast(str, DMSContainer.model_fields["constraint"].validation_alias)
|
|
155
|
+
|
|
156
|
+
class ViewColumns:
    """User-facing column names of the views table, resolved from DMSView aliases."""

    view = cast(str, DMSView.model_fields["view"].validation_alias)
    filter = cast(str, DMSView.model_fields["filter"].validation_alias)
|
|
159
|
+
|
|
160
|
+
def __init__(self, default_space: str, default_version: str, source: TableSource) -> None:
    self.default_space = default_space
    self.default_version = default_version
    # Used to translate (sheet, row, column) locations into human-readable error messages.
    self.source = source
    # Errors accumulated while parsing; raised together as a DataModelImportException.
    self.errors: list[ModelSyntaxError] = []
|
|
165
|
+
|
|
166
|
+
def read_tables(self, tables: TableDMS) -> RequestSchema:
    """Convert the parsed tables into a RequestSchema.

    Parsing continues past individual failures so that all errors can be
    reported at once; if any were collected, a DataModelImportException
    carrying them is raised instead of returning a schema.

    Args:
        tables: The validated table representation of the data model.

    Returns:
        The complete request schema (space, containers, views, data model, node types).

    Raises:
        DataModelImportException: If any syntax errors were collected during parsing.
    """
    space_request = self.read_space(self.default_space)
    node_types = self.read_nodes(tables.nodes)
    enum_collections = self.read_enum_collections(tables.enum)
    read = self.read_properties(tables.properties, enum_collections)
    processed = self.process_properties(read)
    containers = self.read_containers(tables.containers, processed)
    views, valid_view_entities = self.read_views(tables.views, processed.view)
    data_model = self.read_data_model(tables, valid_view_entities)

    if self.errors:
        raise DataModelImportException(self.errors) from None
    return RequestSchema(
        dataModel=data_model, views=views, containers=containers, spaces=[space_request], nodeTypes=node_types
    )
|
|
181
|
+
|
|
182
|
+
def read_space(self, space: str) -> SpaceRequest:
    """Validate the default space identifier and return it as a SpaceRequest.

    Raises:
        DataModelImportException: Immediately on an invalid space, since every
            subsequent entity would otherwise repeat the same error.
    """
    space_request = self._validate_obj(SpaceRequest, {"space": space}, (self.Sheets.metadata,), field_name="value")
    if space_request is None:
        # If space is invalid, we stop parsing to avoid raising an error for every place the space is used.
        raise DataModelImportException(self.errors) from None
    return space_request
|
|
188
|
+
|
|
189
|
+
def read_nodes(self, nodes: list[DMSNode]) -> list[NodeReference]:
    """Validate each node row and return the successfully parsed node references."""
    references: list[NodeReference] = []
    for index, node_row in enumerate(nodes):
        payload = self._create_node_ref(node_row.node)
        parsed = self._validate_obj(NodeReference, payload, (self.Sheets.nodes, index))
        if parsed is None:
            # Validation failure was already recorded in self.errors; skip the row.
            continue
        references.append(parsed)
    return references
|
|
197
|
+
|
|
198
|
+
@staticmethod
def read_enum_collections(enum_rows: list[DMSEnum]) -> dict[str, dict[str, Any]]:
    """Group enum rows by collection, mapping each value to its name and description."""
    collections: dict[str, dict[str, Any]] = defaultdict(dict)
    for entry in enum_rows:
        record = {"name": entry.name, "description": entry.description}
        collections[entry.collection][entry.value] = record
    return collections
|
|
207
|
+
|
|
208
|
+
def read_properties(
    self, properties: list[DMSProperty], enum_collections: dict[str, dict[str, Any]]
) -> ReadProperties:
    """Read every row of the properties table into raw per-row results.

    Each row always contributes a view property; it additionally contributes a
    container property (plus any indices and constraints) only when both the
    container and container-property columns are filled.
    """
    read = ReadProperties()
    for row_no, prop in enumerate(properties):
        self._process_view_property(prop, read, row_no)
        if prop.container is None or prop.container_property is None:
            # This is when the property is an edge or reverse direct relation property.
            continue
        self._process_container_property(prop, read, enum_collections, row_no)
        self._process_index(prop, read, row_no)
        self._process_constraint(prop, read, row_no)
    return read
|
|
221
|
+
|
|
222
|
+
def process_properties(self, read: ReadProperties) -> ProcessedProperties:
    """Merge the raw per-row results into per-entity definitions, recording any conflicts."""
    return ProcessedProperties(
        container=self.create_container_properties(read),
        view=self.create_view_properties(read),
        indices=self.create_indices(read),
        constraints=self.create_constraints(read),
    )
|
|
229
|
+
|
|
230
|
+
def create_container_properties(
    self, read: ReadProperties
) -> dict[ParsedEntity, dict[str, ContainerPropertyDefinition]]:
    """Merge per-row container properties into one definition per (container, property).

    When several view properties map to the same container property, the first
    row's definition wins, and an error is recorded if the rows disagree on any
    of the container-related columns.
    """
    container_props: dict[ParsedEntity, dict[str, ContainerPropertyDefinition]] = defaultdict(dict)
    for (container_entity, prop_id), prop_list in read.container.items():
        if len(prop_list) == 0:
            # Should not happen
            continue
        container_props[container_entity][prop_id] = prop_list[0].container_property
        if len(prop_list) > 1 and self._are_definitions_different(prop_list):
            # If multiple view properties are mapping to the same container property,
            # the container property definitions must be the same.
            rows_str = humanize_collection(
                [self.source.adjust_row_number(self.Sheets.properties, p.row_no) for p in prop_list]
            )
            container_columns_str = humanize_collection(
                [
                    self.PropertyColumns.connection,
                    self.PropertyColumns.value_type,
                    self.PropertyColumns.min_count,
                    self.PropertyColumns.max_count,
                    self.PropertyColumns.default,
                    self.PropertyColumns.auto_increment,
                    self.PropertyColumns.container_property_name,
                    self.PropertyColumns.container_property_description,
                    self.PropertyColumns.index,
                    self.PropertyColumns.constraint,
                ]
            )
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In {self.source.location((self.Sheets.properties,))} "
                        f"when the column {self.PropertyColumns.container!r} and "
                        f"{self.PropertyColumns.container_property!r} are the same, "
                        f"all the container columns ({container_columns_str}) must be the same. "
                        f"Inconsistent definitions for container '{container_entity!s} "
                        f"and {prop_id!r}' found in rows {rows_str}."
                    )
                )
            )
    return container_props
|
|
272
|
+
|
|
273
|
+
def _are_definitions_different(self, prop_list: list[ReadContainerProperty]) -> bool:
    """Return True when the rows disagree on the container property definition."""
    if not prop_list:
        return False
    reference = prop_list[0].container_property
    return any(entry.container_property != reference for entry in prop_list[1:])
|
|
281
|
+
|
|
282
|
+
def create_view_properties(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, ViewRequestProperty]]:
    """Merge per-row view properties into one definition per (view, property).

    The (view, view property) combination must be unique; duplicated rows keep
    the first definition and record an error listing the offending rows.
    """
    view_props: dict[ParsedEntity, dict[str, ViewRequestProperty]] = defaultdict(dict)
    for (view_entity, prop_id), prop_list in read.view.items():
        if len(prop_list) == 0:
            # Should not happen
            continue
        view_props[view_entity][prop_id] = prop_list[0].view_property
        if len(prop_list) > 1:
            # Safeguard against duplicated rows for view properties.
            rows_str = humanize_collection(
                [self.source.adjust_row_number(self.Sheets.properties, p.row_no) for p in prop_list]
            )
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In {self.source.location((self.Sheets.properties,))} the combination of columns "
                        f"{self.PropertyColumns.view!r} and {self.PropertyColumns.view_property!r} must be unique. "
                        f"Duplicated entries for view '{view_entity!s}' and "
                        f"property '{prop_id!s}' found in rows {rows_str}."
                    )
                )
            )

    return view_props
|
|
306
|
+
|
|
307
|
+
def create_indices(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, Index]]:
    """Merge per-row index entries into one Index per (container, index id).

    An index spanning multiple properties requires every participating row to
    set the 'order' attribute; the properties are then sorted by that order.
    Rows missing the order are reported as errors and the index is dropped.
    """
    indices: dict[ParsedEntity, dict[str, Index]] = defaultdict(dict)
    for (container_entity, index_id), index_list in read.indices.items():
        if len(index_list) == 0:
            continue
        index = index_list[0].index
        if len(index_list) == 1:
            indices[container_entity][index_id] = index
            continue
        if missing_order := [idx for idx in index_list if idx.order is None]:
            row_str = humanize_collection(
                [self.source.adjust_row_number(self.Sheets.properties, idx.row_no) for idx in missing_order]
            )
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In table {self.Sheets.properties!r} column {self.PropertyColumns.index!r}: "
                        f"the index {index_id!r} on container {container_entity!s} is defined on multiple "
                        f"properties. This requires the 'order' attribute to be set. It is missing in rows "
                        f"{row_str}."
                    )
                )
            )
            continue
        # BUGFIX: `x.order or 999` treated a legitimate order of 0 as missing and
        # sorted it last; compare against None explicitly instead.
        index.properties = [
            idx.prop_id for idx in sorted(index_list, key=lambda x: 999 if x.order is None else x.order)
        ]
        indices[container_entity][index_id] = index
    return indices
|
|
334
|
+
|
|
335
|
+
def create_constraints(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, Constraint]]:
    """Merge per-row constraint entries into one Constraint per (container, constraint id).

    A uniqueness constraint spanning multiple properties requires every
    participating row to set the 'order' attribute; the properties are then
    sorted by that order. Rows missing the order are reported as errors and the
    constraint is dropped. Non-uniqueness constraints keep the first definition.
    """
    constraints: dict[ParsedEntity, dict[str, Constraint]] = defaultdict(dict)
    for (container_entity, constraint_id), constraint_list in read.constraints.items():
        if len(constraint_list) == 0:
            continue
        constraint = constraint_list[0].constraint
        if len(constraint_list) == 1 or not isinstance(constraint, UniquenessConstraintDefinition):
            constraints[container_entity][constraint_id] = constraint
            continue
        if missing_order := [c for c in constraint_list if c.order is None]:
            row_str = humanize_collection(
                [self.source.adjust_row_number(self.Sheets.properties, c.row_no) for c in missing_order]
            )
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In table {self.Sheets.properties!r} column {self.PropertyColumns.constraint!r}: "
                        f"the uniqueness constraint {constraint_id!r} on container {container_entity!s} is defined "
                        f"on multiple properties. This requires the 'order' attribute to be set. It is missing in "
                        f"rows {row_str}."
                    )
                )
            )
            continue
        # BUGFIX: `x.order or 999` treated a legitimate order of 0 as missing and
        # sorted it last; compare against None explicitly instead.
        constraint.properties = [
            c.prop_id for c in sorted(constraint_list, key=lambda x: 999 if x.order is None else x.order)
        ]
        constraints[container_entity][constraint_id] = constraint
    return constraints
|
|
362
|
+
|
|
363
|
+
def _process_view_property(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
    """Validate the view-side definition of one properties-table row and record it."""
    loc = (self.Sheets.properties, row_no)
    data = self.read_view_property(prop, loc)
    view_prop = self._validate_adapter(ViewRequestPropertyAdapter, data, loc)
    if view_prop is not None:
        read.view[(prop.view, prop.view_property)].append(
            # MyPy has a very strange complaint here. It complains that given type is not expected type,
            # even though they are exactly the same.
            # NOTE(review): prop_id is set to the *container* property id here, while the
            # dict key uses the view property id — confirm this asymmetry is intended.
            ReadViewProperty(prop.container_property, row_no, view_prop)  # type: ignore[arg-type]
        )
    return None
|
|
374
|
+
|
|
375
|
+
def _process_container_property(
    self, prop: DMSProperty, read: ReadProperties, enum_collections: dict[str, dict[str, Any]], row_no: int
) -> None:
    """Validate the container-side definition of one properties-table row and record it."""
    location = (self.Sheets.properties, row_no)
    payload = self.read_container_property(prop, enum_collections, loc=location)
    definition = self._validate_obj(ContainerPropertyDefinition, payload, location)
    if definition is None or not prop.container or not prop.container_property:
        return None
    entry = ReadContainerProperty(prop.container_property, row_no, definition)
    read.container[(prop.container, prop.container_property)].append(entry)
    return None
|
|
386
|
+
|
|
387
|
+
def _process_index(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
    """Parse every index entity on the row and record it under (container, index id)."""
    if prop.index is None or prop.container_property is None or prop.container is None:
        return

    location = (self.Sheets.properties, row_no, self.PropertyColumns.index)
    for entity in prop.index:
        payload = self.read_index(entity, prop.container_property)
        parsed = self._validate_adapter(IndexAdapter, payload, location)
        if parsed is None:
            # Validation failure already recorded; skip this index entity.
            continue
        entry = ReadIndex(
            prop_id=prop.container_property,
            order=self._read_order(entity.properties, location),
            row_no=row_no,
            index_id=entity.suffix,
            index=parsed,
        )
        read.indices[(prop.container, entity.suffix)].append(entry)
|
|
403
|
+
|
|
404
|
+
def _read_order(self, properties: dict[str, Any], loc: tuple[str | int, ...]) -> int | None:
    """Extract the optional 'order' attribute from an entity's properties.

    Returns None when absent or invalid; an invalid value is recorded as a
    ModelSyntaxError rather than raised.
    """
    if "order" not in properties:
        return None
    try:
        return int(properties["order"])
    # BUGFIX: also catch TypeError — int(None) or int([...]) raises TypeError,
    # not ValueError, and previously crashed instead of being reported.
    except (TypeError, ValueError):
        self.errors.append(
            ModelSyntaxError(
                message=f"In {self.source.location(loc)} invalid order value '{properties['order']}'. "
                "Must be an integer."
            )
        )
        return None
|
|
417
|
+
|
|
418
|
+
@staticmethod
def read_index(index: ParsedEntity, prop_id: str) -> dict[str, Any]:
    """Build the raw index payload for validation; entity properties override the defaults."""
    payload: dict[str, Any] = {"indexType": index.prefix, "properties": [prop_id]}
    payload.update(index.properties)
    return payload
|
|
425
|
+
|
|
426
|
+
def _process_constraint(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
    """Parse every constraint entity on the row and record it under (container, constraint id)."""
    if prop.constraint is None or prop.container_property is None or prop.container is None:
        return
    location = (self.Sheets.properties, row_no, self.PropertyColumns.constraint)
    for entity in prop.constraint:
        payload = self.read_property_constraint(entity, prop.container_property)
        parsed = self._validate_adapter(ConstraintAdapter, payload, location)
        if parsed is None:
            # Validation failure already recorded; skip this constraint entity.
            continue
        entry = ReadConstraint(
            prop_id=prop.container_property,
            order=self._read_order(entity.properties, location),
            constraint_id=entity.suffix,
            row_no=row_no,
            constraint=parsed,
        )
        read.constraints[(prop.container, entity.suffix)].append(entry)
|
|
445
|
+
|
|
446
|
+
@staticmethod
def read_property_constraint(constraint: ParsedEntity, prop_id: str) -> dict[str, Any]:
    """Build the raw constraint payload for validation; entity properties override the defaults."""
    payload: dict[str, Any] = {"constraintType": constraint.prefix, "properties": [prop_id]}
    payload.update(constraint.properties)
    return payload
|
|
449
|
+
|
|
450
|
+
def read_view_property(self, prop: DMSProperty, loc: tuple[str | int, ...]) -> dict[str, Any]:
    """Reads a single view property from a given row in the properties table.

    The type of property (core, edge, reverse direct relation) is determined based on the connection column
    as follows:
    1. If the connection is empty or 'direct' it is a core property.
    2. If the connection is 'edge' it is an edge property.
    3. If the connection is 'reverse' it is a reverse direct relation property
    4. Otherwise, it is an error (recorded in self.errors; an empty dict is returned).

    Args:
        prop (DMSProperty): The property row to read.
        loc (tuple[str | int, ...]): The location of the property in the source for error reporting.

    Returns:
        dict[str, Any]: The raw view-property payload (empty on an invalid connection type).
    """

    if prop.connection is None or prop.connection.suffix == "direct":
        return self.read_core_view_property(prop)
    elif prop.connection.suffix == "edge":
        return self.read_edge_view_property(prop, loc)
    elif prop.connection.suffix == "reverse":
        return self.read_reverse_direct_relation_view_property(prop)
    else:
        self.errors.append(
            ModelSyntaxError(
                message=f"In {self.source.location(loc)} invalid connection type '{prop.connection.suffix}'. "
            )
        )
        return {}
|
|
481
|
+
|
|
482
|
+
def read_core_view_property(self, prop: DMSProperty) -> dict[str, Any]:
    """Build the payload for a primary (container-backed) view property."""
    container_ref = self._create_container_ref(prop.container)
    # Only a 'direct' connection carries a source view; plain properties have none.
    if prop.connection is None:
        source_ref = None
    else:
        source_ref = self._create_view_ref(prop.value_type)
    return {
        "connectionType": "primary_property",
        "name": prop.name,
        "description": prop.description,
        "container": container_ref,
        "containerPropertyIdentifier": prop.container_property,
        "source": source_ref,
    }
|
|
491
|
+
|
|
492
|
+
def read_edge_view_property(self, prop: DMSProperty, loc: tuple[str | int, ...]) -> dict[str, Any]:
    """Build the payload for an edge view property from one properties-table row.

    Uses the connection entity's properties for the optional 'edgeSource',
    'type', and 'direction' attributes; cardinality follows max_count.
    """
    if prop.connection is None:
        return {}
    edge_source: dict[str, str | None] | None = None
    if "edgeSource" in prop.connection.properties:
        edge_source = self._create_view_ref_unparsed(
            prop.connection.properties["edgeSource"], (*loc, self.PropertyColumns.connection, "edgeSource")
        )
    return dict(
        # max_count of exactly 1 means a single edge; anything else allows multiple edges.
        connectionType="single_edge_connection" if prop.max_count == 1 else "multi_edge_connection",
        name=prop.name,
        description=prop.description,
        source=self._create_view_ref(prop.value_type),
        type=self._create_node_ref_unparsed(
            prop.connection.properties.get("type"),
            prop.view,
            prop.view_property,
            (*loc, self.PropertyColumns.connection, "type"),
        ),
        edgeSource=edge_source,
        # Edges point outwards unless the connection explicitly says otherwise.
        direction=prop.connection.properties.get("direction", "outwards"),
    )
|
|
514
|
+
|
|
515
|
+
def read_reverse_direct_relation_view_property(
    self,
    prop: DMSProperty,
) -> dict[str, Any]:
    """Build the payload for a reverse direct relation view property."""
    if prop.connection is None:
        return {}
    target_view = self._create_view_ref(prop.value_type)
    if prop.max_count == 1:
        connection_type = "single_reverse_direct_relation"
    else:
        connection_type = "multi_reverse_direct_relation"
    return {
        "connectionType": connection_type,
        "name": prop.name,
        "description": prop.description,
        "source": target_view,
        # 'through' identifies the direct relation property on the target view being reversed.
        "through": {
            "source": target_view,
            "identifier": prop.connection.properties.get("property"),
        },
    }
|
|
532
|
+
|
|
533
|
+
def read_container_property(
    self, prop: DMSProperty, enum_collections: dict[str, dict[str, Any]], loc: tuple[str | int, ...]
) -> dict[str, Any]:
    """Build the raw container-property payload for one properties-table row."""
    data_type = self._read_data_type(prop, enum_collections, loc)
    return {
        "immutable": prop.immutable,
        # A property is nullable unless a minimum count of at least one is required.
        "nullable": prop.min_count is None or prop.min_count == 0,
        "autoIncrement": prop.auto_increment,
        "defaultValue": prop.default,
        "description": prop.container_property_description,
        "name": prop.container_property_name,
        "type": data_type,
    }
|
|
546
|
+
|
|
547
|
+
def _read_data_type(
|
|
548
|
+
self, prop: DMSProperty, enum_collections: dict[str, dict[str, Any]], loc: tuple[str | int, ...]
|
|
549
|
+
) -> dict[str, Any]:
|
|
550
|
+
# Implementation to read the container property type from DMSProperty
|
|
551
|
+
is_list = None if prop.max_count is None else prop.max_count > 1
|
|
552
|
+
max_list_size: int | None = None
|
|
553
|
+
if is_list and prop.max_count is not None:
|
|
554
|
+
max_list_size = prop.max_count
|
|
555
|
+
|
|
556
|
+
args: dict[str, Any] = {
|
|
557
|
+
"maxListSize": max_list_size,
|
|
558
|
+
"list": is_list,
|
|
559
|
+
"type": "direct" if prop.connection is not None else prop.value_type.suffix,
|
|
560
|
+
}
|
|
561
|
+
args.update(prop.value_type.properties)
|
|
562
|
+
if "container" in args and prop.connection is not None:
|
|
563
|
+
# Direct relation constraint.
|
|
564
|
+
args["container"] = self._create_container_ref_unparsed(
|
|
565
|
+
prop.connection.properties["container"], (*loc, self.PropertyColumns.connection, "container")
|
|
566
|
+
)
|
|
567
|
+
if args["type"] == "enum" and "collection" in prop.value_type.properties:
|
|
568
|
+
args["values"] = enum_collections.get(prop.value_type.properties["collection"], {})
|
|
569
|
+
return args
|
|
570
|
+
|
|
571
|
+
def read_containers(
    self, containers: list[DMSContainer], properties: ProcessedProperties
) -> list[ContainerRequest]:
    """Convert container sheet rows into validated ContainerRequest objects.

    Constraints declared on the container row ('requires') are merged with
    uniqueness constraints derived from the properties sheet; identifier
    clashes between the two are reported as syntax errors. Duplicate
    container rows are also reported, and only the first occurrence is kept.
    """
    containers_requests: list[ContainerRequest] = []
    # Tracks which rows each container entity appeared in, for duplicate detection.
    rows_by_seen: dict[ParsedEntity, list[int]] = defaultdict(list)
    for row_no, container in enumerate(containers):
        property_constraints = properties.constraints.get(container.container, {})
        require_constraints = self.read_container_constraints(container, row_no)
        # Constraint identifiers must be unique across both sources.
        if conflict := set(property_constraints.keys()).intersection(set(require_constraints.keys())):
            conflict_str = humanize_collection(conflict)
            location_str = self.source.location((self.Sheets.containers, row_no, self.ContainerColumns.constraint))
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In {location_str} the container '{container.container!s}' has constraints defined "
                        f"with the same identifier(s) as the uniqueness constraint defined in the "
                        f"{self.Sheets.properties} sheet. Ensure that the identifiers are unique. "
                        f"Conflicting identifiers: {conflict_str}. "
                    )
                )
            )
        # On an identifier clash, the row-level (requires) constraints win the merge.
        constraints = {**property_constraints, **require_constraints}
        container_request = self._validate_obj(
            ContainerRequest,
            dict(
                **self._create_container_ref(container.container),
                usedFor=container.used_for,
                name=container.name,
                description=container.description,
                properties=properties.container[container.container],
                indexes=properties.indices.get(container.container),
                constraints=constraints or None,
            ),
            (self.Sheets.containers, row_no),
        )
        if container_request is None:
            # Validation errors were already recorded by _validate_obj.
            continue
        if container.container in rows_by_seen:
            # Duplicate entity: record the row but keep only the first request.
            rows_by_seen[container.container].append(row_no)
        else:
            containers_requests.append(container_request)
            rows_by_seen[container.container] = [row_no]
    for entity, rows in rows_by_seen.items():
        if len(rows) > 1:
            rows_str = humanize_collection([self.source.adjust_row_number(self.Sheets.containers, r) for r in rows])
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In {self.source.location((self.Sheets.containers,))} the values in "
                        f"column {self.ContainerColumns.container!r} must be unique. "
                        f"Duplicated entries for container '{entity!s}' found in rows {rows_str}."
                    )
                )
            )
    return containers_requests
|
|
627
|
+
|
|
628
|
+
def read_container_constraints(self, container: DMSContainer, row_no: int) -> dict[str, Constraint]:
    """Read the 'requires' constraints declared on a container sheet row.

    Entries with an unsupported constraint type or without a 'require'
    property are recorded as syntax errors and skipped.
    """
    result: dict[str, Constraint] = {}
    if not container.constraint:
        return result
    # The location is identical for every constraint entry on this row.
    loc = self.Sheets.containers, row_no, self.ContainerColumns.constraint
    for entity in container.constraint:
        if entity.prefix != "requires":
            message = (
                f"In {self.source.location(loc)} the constraint '{entity.suffix}' on container "
                f"'{container.container!s}' has an invalid type '{entity.prefix}'. Only 'requires' "
                f"constraints are supported at the container level."
            )
            self.errors.append(ModelSyntaxError(message=message))
            continue

        if "require" not in entity.properties:
            message = (
                f"In {self.source.location(loc)} the constraint '{entity.suffix}' on container "
                f"'{container.container!s}' is missing the "
                f"'require' property which is required for container level constraints."
            )
            self.errors.append(ModelSyntaxError(message=message))
            continue
        payload = {
            "constraintType": entity.prefix,
            "require": self._create_container_ref_unparsed(entity.properties["require"], loc),
        }
        created = self._validate_adapter(ConstraintAdapter, payload, loc)
        if created is not None:
            result[entity.suffix] = created
    return result
|
|
666
|
+
|
|
667
|
+
def read_views(
    self,
    views: list[DMSView],
    properties: dict[ParsedEntity, dict[str, ViewRequestProperty]],
) -> tuple[list[ViewRequest], set[ParsedEntity]]:
    """Convert view sheet rows into validated ViewRequest objects.

    A view filter, when present, must be valid JSON; parse failures are
    recorded as syntax errors and the view is validated without a filter.
    Duplicate view rows are reported, keeping only the first occurrence.

    Returns:
        A tuple of (validated view requests, set of view entities that had
        at least one valid row).
    """
    views_requests: list[ViewRequest] = []
    # Tracks which rows each view entity appeared in, for duplicate detection.
    rows_by_seen: dict[ParsedEntity, list[int]] = defaultdict(list)
    for row_no, view in enumerate(views):
        filter_dict: dict[str, Any] | None = None
        if view.filter is not None:
            try:
                filter_dict = json.loads(view.filter)
            except ValueError as e:
                self.errors.append(
                    ModelSyntaxError(
                        message=(
                            f"In {self.source.location((self.Sheets.views, row_no, self.ViewColumns.filter))} "
                            f"must be valid json. Got error {e!s}"
                        )
                    )
                )
        view_request = self._validate_obj(
            ViewRequest,
            dict(
                **self._create_view_ref(view.view),
                name=view.name,
                description=view.description,
                implements=[self._create_view_ref(impl) for impl in view.implements] if view.implements else None,
                filter=filter_dict,
                properties=properties.get(view.view, {}),
            ),
            (self.Sheets.views, row_no),
        )
        if view_request is None:
            # Validation errors were already recorded by _validate_obj.
            continue
        if view.view in rows_by_seen:
            # Duplicate entity: record the row but keep only the first request.
            rows_by_seen[view.view].append(row_no)
        else:
            views_requests.append(view_request)
            rows_by_seen[view.view] = [row_no]
    for entity, rows in rows_by_seen.items():
        if len(rows) > 1:
            rows_str = humanize_collection([self.source.adjust_row_number(self.Sheets.views, r) for r in rows])
            self.errors.append(
                ModelSyntaxError(
                    message=(
                        f"In {self.source.location((self.Sheets.views,))} the values in "
                        f"column {self.ViewColumns.view!r} must be unique. "
                        f"Duplicated entries for view '{entity!s}' found in rows {rows_str}."
                    )
                )
            )
    return views_requests, set(rows_by_seen.keys())
|
|
720
|
+
|
|
721
|
+
def read_data_model(self, tables: TableDMS, valid_view_entities: set[ParsedEntity]) -> DataModelRequest:
    """Assemble and validate the data model request from metadata and views.

    Raises:
        DataModelImportException: if validation fails. As this is the final
            import step, all accumulated errors are raised here.
    """
    data: dict[str, Any] = {meta.key: meta.value for meta in tables.metadata}
    view_refs = []
    for view in tables.views:
        # Views explicitly flagged out of the model, or that failed earlier
        # validation, are excluded from the data model listing.
        if view.in_model is not False and view.view in valid_view_entities:
            view_refs.append(self._create_view_ref(view.view))
    data["views"] = view_refs
    model = self._validate_obj(DataModelRequest, data, (self.Sheets.metadata,), field_name="value")
    if model is None:
        # This is the last step, so we can raise the error here.
        raise DataModelImportException(self.errors) from None
    return model
|
|
735
|
+
|
|
736
|
+
def _parse_entity(self, entity: str, loc: tuple[str | int, ...]) -> ParsedEntity | None:
    """Parse *entity*, recording a syntax error and returning None on failure."""
    try:
        return parse_entity(entity)
    except ValueError as e:
        message = f"In {self.source.location(loc)} failed to parse entity '{entity}': {e!s}"
        self.errors.append(ModelSyntaxError(message=message))
        return None
|
|
745
|
+
|
|
746
|
+
def _create_view_ref_unparsed(self, entity: str, loc: tuple[str | int, ...]) -> dict[str, str | None]:
    """Parse *entity* and build a view reference; empty dict if parsing failed."""
    parsed = self._parse_entity(entity, loc)
    return {} if parsed is None else self._create_view_ref(parsed)
|
|
751
|
+
|
|
752
|
+
def _create_view_ref(self, entity: ParsedEntity | None) -> dict[str, str | None]:
|
|
753
|
+
if entity is None or entity.suffix == "":
|
|
754
|
+
# If no suffix is given, we cannot create a valid reference.
|
|
755
|
+
return dict()
|
|
756
|
+
space, version = entity.prefix, entity.properties.get("version")
|
|
757
|
+
if space == "":
|
|
758
|
+
space = self.default_space
|
|
759
|
+
# Only if default space is used, we can use default version.
|
|
760
|
+
if version is None:
|
|
761
|
+
version = self.default_version
|
|
762
|
+
return {
|
|
763
|
+
"space": space,
|
|
764
|
+
"externalId": entity.suffix,
|
|
765
|
+
"version": version,
|
|
766
|
+
}
|
|
767
|
+
|
|
768
|
+
def _create_container_ref_unparsed(self, entity: str, loc: tuple[str | int, ...]) -> dict[str, str]:
    """Parse *entity* and build a container reference; empty dict on parse failure."""
    parsed = self._parse_entity(entity, loc)
    return {} if parsed is None else self._create_container_ref(parsed)
|
|
773
|
+
|
|
774
|
+
def _create_container_ref(self, entity: ParsedEntity | None) -> dict[str, str]:
|
|
775
|
+
if entity is None or entity.suffix == "":
|
|
776
|
+
# If no suffix is given, we cannot create a valid reference.
|
|
777
|
+
return dict()
|
|
778
|
+
return {
|
|
779
|
+
"space": entity.prefix or self.default_space,
|
|
780
|
+
"externalId": entity.suffix,
|
|
781
|
+
}
|
|
782
|
+
|
|
783
|
+
def _create_node_ref_unparsed(
    self, entity: str | None, view: ParsedEntity, view_prop: str, loc: tuple[str | int, ...]
) -> dict[str, str | None]:
    """Build a node reference from a raw entity string, with view fallback.

    A missing entity falls back to the view-property default; an unparsable
    entity yields an empty dict (the error is recorded by _parse_entity).
    """
    if entity is None:
        # Use the default derived from the view and its property.
        return self._create_node_ref(None, view, view_prop)
    parsed = self._parse_entity(entity, loc)
    return {} if parsed is None else self._create_node_ref(parsed, view, view_prop)
|
|
793
|
+
|
|
794
|
+
@overload
|
|
795
|
+
def _create_node_ref(
|
|
796
|
+
self, entity: ParsedEntity, *, view: None = None, view_prop: None = None
|
|
797
|
+
) -> dict[str, str | None]: ...
|
|
798
|
+
|
|
799
|
+
@overload
|
|
800
|
+
def _create_node_ref(
|
|
801
|
+
self, entity: ParsedEntity | None, view: ParsedEntity, view_prop: str
|
|
802
|
+
) -> dict[str, str | None]: ...
|
|
803
|
+
|
|
804
|
+
def _create_node_ref(
|
|
805
|
+
self, entity: ParsedEntity | None, view: ParsedEntity | None = None, view_prop: str | None = None
|
|
806
|
+
) -> dict[str, str | None]:
|
|
807
|
+
if entity is None or entity.suffix == "":
|
|
808
|
+
if view is None or view_prop is None:
|
|
809
|
+
return dict()
|
|
810
|
+
# If no suffix is given, we fallback to the view's property
|
|
811
|
+
return {
|
|
812
|
+
"space": view.prefix or self.default_space,
|
|
813
|
+
"externalId": f"{view.suffix}.{view_prop}",
|
|
814
|
+
}
|
|
815
|
+
return {
|
|
816
|
+
"space": entity.prefix or self.default_space,
|
|
817
|
+
"externalId": entity.suffix,
|
|
818
|
+
}
|
|
819
|
+
|
|
820
|
+
def _validate_obj(
    self,
    obj: type[T_BaseModel],
    data: dict,
    parent_loc: tuple[str | int, ...],
    field_name: Literal["field", "column", "value"] = "column",
) -> T_BaseModel | None:
    """Validate *data* against the model *obj*.

    Returns the validated instance, or None after recording the validation
    errors as syntax errors at *parent_loc*.
    """
    try:
        validated = obj.model_validate(data)
    except ValidationError as error:
        self._add_error_messages(error, parent_loc, field_name=field_name)
        return None
    return validated
|
|
832
|
+
|
|
833
|
+
def _validate_adapter(
    self, adapter: TypeAdapter[T_BaseModel], data: dict[str, Any], parent_loc: tuple[str | int, ...]
) -> T_BaseModel | None:
    """Strictly validate *data* with *adapter*, recording errors on failure."""
    try:
        validated = adapter.validate_python(data, strict=True)
    except ValidationError as error:
        self._add_error_messages(error, parent_loc, field_name="column")
        return None
    return validated
|
|
841
|
+
|
|
842
|
+
def _add_error_messages(
    self,
    error: ValidationError,
    parent_loc: tuple[str | int, ...],
    field_name: Literal["field", "column", "value"] = "column",
) -> None:
    """Record a ValidationError as deduplicated ModelSyntaxError entries."""
    # Columns read as "empty" to the user; other field kinds as "missing".
    descriptor = "empty" if field_name == "column" else "missing"
    messages = humanize_validation_error(
        error,
        parent_loc=parent_loc,
        humanize_location=self.source.location,
        field_name=field_name,
        field_renaming=self.source.field_mapping(parent_loc[0]),
        missing_required_descriptor=descriptor,
    )
    recorded: set[str] = set()
    for message in messages:
        # Skip duplicates while preserving first-seen order.
        if message not in recorded:
            recorded.add(message)
            self.errors.append(ModelSyntaxError(message=message))
|