cognite-neat 0.125.0__py3-none-any.whl → 0.126.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of cognite-neat might be problematic.
- cognite/neat/_data_model/exporters/__init__.py +0 -0
- cognite/neat/_data_model/exporters/_base.py +17 -0
- cognite/neat/_data_model/exporters/_table_exporter.py +35 -0
- cognite/neat/_data_model/importers/_table_importer/importer.py +31 -4
- cognite/neat/_data_model/importers/_table_importer/reader.py +804 -0
- cognite/neat/_data_model/models/dms/__init__.py +2 -0
- cognite/neat/_utils/useful_types.py +3 -0
- cognite/neat/_version.py +1 -1
- cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py +2 -2
- cognite/neat/v0/core/_data_model/models/entities/_single_value.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_unverified.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_validation.py +2 -2
- cognite/neat/v0/core/_data_model/models/physical/_verified.py +3 -3
- cognite/neat/v0/core/_data_model/transformers/_converters.py +1 -1
- cognite/neat/v0/core/_instances/queries/_select.py +13 -29
- cognite/neat/v0/core/_store/_instance.py +2 -2
- {cognite_neat-0.125.0.dist-info → cognite_neat-0.126.0.dist-info}/METADATA +1 -1
- {cognite_neat-0.125.0.dist-info → cognite_neat-0.126.0.dist-info}/RECORD +20 -16
- {cognite_neat-0.125.0.dist-info → cognite_neat-0.126.0.dist-info}/WHEEL +0 -0
- {cognite_neat-0.125.0.dist-info → cognite_neat-0.126.0.dist-info}/licenses/LICENSE +0 -0
cognite/neat/_data_model/exporters/__init__.py
File without changes
cognite/neat/_data_model/exporters/_base.py
ADDED
@@ -0,0 +1,17 @@
+from abc import ABC, abstractmethod
+from typing import Generic, TypeVar
+
+from cognite.neat._data_model.models.dms import RequestSchema
+
+T_Export = TypeVar("T_Export")
+
+
+class DMSExporter(ABC, Generic[T_Export]):
+    """This is the base class for all DMS exporters."""
+
+    NEW_LINE = "\n"
+    ENCODING = "utf-8"
+
+    @abstractmethod
+    def export(self, data_model: RequestSchema) -> T_Export:
+        raise NotImplementedError()
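To show how this base class is meant to be used, here is a minimal hypothetical subclass. The YAML serialization, the PyYAML dependency, and the assumption that RequestSchema is a Pydantic v2 model are illustrative and not part of this release.

# Hypothetical sketch of a concrete exporter built on the new DMSExporter base class.
# Assumes PyYAML is installed and that RequestSchema is a Pydantic v2 model.
import yaml

from cognite.neat._data_model.exporters._base import DMSExporter
from cognite.neat._data_model.models.dms import RequestSchema


class YamlStringExporter(DMSExporter[str]):
    """Illustrative exporter that renders the schema as a YAML string."""

    def export(self, data_model: RequestSchema) -> str:
        # model_dump() is the standard Pydantic v2 serialization entry point.
        return yaml.safe_dump(data_model.model_dump(exclude_none=True), allow_unicode=True)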
cognite/neat/_data_model/exporters/_table_exporter.py
ADDED
@@ -0,0 +1,35 @@
+from pathlib import Path
+
+from cognite.neat._data_model.models.dms import RequestSchema
+from cognite.neat._utils.useful_types import DataModelTableType
+
+from ._base import DMSExporter
+
+
+class DMSTableExporter(DMSExporter[DataModelTableType]):
+    """Exports DMS to a table structure.
+
+    The tables are expected to be a dictionary where the keys are the table names and the values
+    are lists of dictionaries representing the rows in the table.
+    """
+
+    def export(self, data_model: RequestSchema) -> DataModelTableType:
+        raise NotImplementedError()
+
+    def as_excel(self, data_model: RequestSchema, file_path: Path) -> None:
+        """Exports the data model as an Excel file.
+
+        Args:
+            data_model (RequestSchema): The data model to export.
+            file_path (Path): The path to the Excel file to create.
+        """
+        raise NotImplementedError()
+
+    def as_yaml(self, data_model: RequestSchema, file_path: Path) -> None:
+        """Exports the data model as a YAML file.
+
+        Args:
+            data_model (RequestSchema): The data model to export.
+            file_path (Path): The path to the YAML file to create.
+        """
+        raise NotImplementedError()
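All three methods are stubs in 0.126.0 (each raises NotImplementedError), but the intended call shape is already visible. A hedged usage sketch:

# Sketch of the intended exporter usage; in 0.126.0 these calls raise NotImplementedError.
from pathlib import Path

from cognite.neat._data_model.exporters._table_exporter import DMSTableExporter
from cognite.neat._data_model.models.dms import RequestSchema


def export_data_model(schema: RequestSchema, out_dir: Path) -> dict:
    exporter = DMSTableExporter()
    exporter.as_excel(schema, out_dir / "model.xlsx")
    exporter.as_yaml(schema, out_dir / "model.yaml")
    return exporter.export(schema)  # table name -> list of row dicts, once implemented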
cognite/neat/_data_model/importers/_table_importer/importer.py
CHANGED
@@ -9,10 +9,13 @@ from cognite.neat._data_model.models.dms import (
 )
 from cognite.neat._exceptions import DataModelImportError
 from cognite.neat._issues import ModelSyntaxError
-from cognite.neat._utils.
+from cognite.neat._utils.text import humanize_collection
+from cognite.neat._utils.useful_types import DataModelTableType
 from cognite.neat._utils.validation import as_json_path, humanize_validation_error

-from .data_classes import TableDMS
+from .data_classes import MetadataValue, TableDMS
+from .reader import DMSTableReader
+from .source import TableSource


 class DMSTableImporter(DMSImporter):
@@ -30,11 +33,16 @@ class DMSTableImporter(DMSImporter):
         f"Missing required column: {sheet!r}": f"Missing required sheet: {sheet!r}" for sheet in REQUIRED_SHEETS
     }

-    def __init__(self, tables:
+    def __init__(self, tables: DataModelTableType, source: TableSource | None = None) -> None:
         self._table = tables
+        self._source = source or TableSource("Unknown")

     def to_data_model(self) -> RequestSchema:
-
+        tables = self._read_tables()
+
+        space, version = self._read_defaults(tables.metadata)
+        reader = DMSTableReader(space, version, self._source)
+        return reader.read_tables(tables)

     def _read_tables(self) -> TableDMS:
         try:
@@ -74,3 +82,22 @@ class DMSTableImporter(DMSImporter):
             return f"{loc[0]} sheet row {loc[1] + 1} column {loc[2]!r}"
         # This should be unreachable as the TableDMS model only has 2 levels.
         return as_json_path(loc)
+
+    @staticmethod
+    def _read_defaults(metadata: list[MetadataValue]) -> tuple[str, str]:
+        """Reads the space and version from the metadata table."""
+        default_space: str | None = None
+        default_version: str | None = None
+        missing = {"space", "version"}
+        for meta in metadata:
+            if meta.key == "space":
+                default_space = str(meta.value)
+                missing.remove("space")
+            elif meta.key == "version":
+                default_version = str(meta.value)
+                missing.remove("version")
+        if missing:
+            error = ModelSyntaxError(message=f"In Metadata missing required values: {humanize_collection(missing)}")
+            # If space or version is missing, we cannot continue parsing the model as these are used as defaults.
+            raise DataModelImportError([error]) from None
+        return str(default_space), str(default_version)
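For orientation, a sketch of how the updated importer is driven. The sheet and column names ("Metadata", "Key", "Value", and the empty placeholder sheets) are assumptions, not confirmed by this diff; what is confirmed is that the Metadata table must provide `space` and `version`, which become the defaults passed to `DMSTableReader`.

# Hypothetical input; sheet and column names are placeholders.
from cognite.neat._data_model.importers._table_importer.importer import DMSTableImporter
from cognite.neat._data_model.importers._table_importer.source import TableSource

tables = {
    "Metadata": [
        {"Key": "space", "Value": "my_space"},  # required, becomes the default space
        {"Key": "version", "Value": "v1"},      # required, becomes the default version
    ],
    "Views": [],
    "Containers": [],
    "Properties": [],
}
importer = DMSTableImporter(tables, source=TableSource("model.xlsx"))
schema = importer.to_data_model()  # raises DataModelImportError if space or version is missing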
cognite/neat/_data_model/importers/_table_importer/reader.py
ADDED
@@ -0,0 +1,804 @@
+import json
+from collections import defaultdict
+from dataclasses import dataclass, field
+from typing import Any, Literal, TypeVar, cast, overload
+
+from pydantic import BaseModel, TypeAdapter, ValidationError
+
+from cognite.neat._data_model.models.dms import (
+    Constraint,
+    ConstraintAdapter,
+    ContainerPropertyDefinition,
+    ContainerRequest,
+    DataModelRequest,
+    Index,
+    IndexAdapter,
+    NodeReference,
+    RequestSchema,
+    SpaceRequest,
+    UniquenessConstraintDefinition,
+    ViewRequest,
+    ViewRequestProperty,
+    ViewRequestPropertyAdapter,
+)
+from cognite.neat._data_model.models.entities import ParsedEntity, parse_entity
+from cognite.neat._exceptions import DataModelImportError
+from cognite.neat._issues import ModelSyntaxError
+from cognite.neat._utils.text import humanize_collection
+from cognite.neat._utils.validation import humanize_validation_error
+
+from .data_classes import DMSContainer, DMSEnum, DMSNode, DMSProperty, DMSView, TableDMS
+from .source import TableSource
+
+T_BaseModel = TypeVar("T_BaseModel", bound=BaseModel)
+
+
+@dataclass
+class ReadViewProperty:
+    prop_id: str
+    row_no: int
+    view_property: ViewRequestProperty
+
+
+@dataclass
+class ReadContainerProperty:
+    prop_id: str
+    row_no: int
+    container_property: ContainerPropertyDefinition
+
+
+@dataclass
+class ReadIndex:
+    prop_id: str
+    order: int | None
+    row_no: int
+    index_id: str
+    index: Index
+
+
+@dataclass
+class ReadConstraint:
+    prop_id: str
+    order: int | None
+    row_no: int
+    constraint_id: str
+    constraint: Constraint
+
+
+@dataclass
+class ReadProperties:
+    """Read properties from the properties table.
+
+    Attributes:
+        container: A mapping from container entity to a mapping of property identifier to container property definition.
+        view: A mapping from view entity to a mapping of property identifier to view property definition.
+        indices: A mapping from (container entity, index identifier) to a list of read indices
+        constraints: A mapping from (container entity, constraint identifier) to a list of read constraints
+    """
+
+    container: dict[tuple[ParsedEntity, str], list[ReadContainerProperty]] = field(
+        default_factory=lambda: defaultdict(list)
+    )
+    view: dict[tuple[ParsedEntity, str], list[ReadViewProperty]] = field(default_factory=lambda: defaultdict(list))
+    indices: dict[tuple[ParsedEntity, str], list[ReadIndex]] = field(default_factory=lambda: defaultdict(list))
+    constraints: dict[tuple[ParsedEntity, str], list[ReadConstraint]] = field(default_factory=lambda: defaultdict(list))
+
+
+@dataclass
+class ProcessedProperties:
+    container: dict[ParsedEntity, dict[str, ContainerPropertyDefinition]] = field(
+        default_factory=lambda: defaultdict(dict)
+    )
+    view: dict[ParsedEntity, dict[str, ViewRequestProperty]] = field(default_factory=lambda: defaultdict(dict))
+    indices: dict[ParsedEntity, dict[str, Index]] = field(default_factory=lambda: defaultdict(dict))
+    constraints: dict[ParsedEntity, dict[str, Constraint]] = field(default_factory=lambda: defaultdict(dict))
+
+
+class DMSTableReader:
+    """Reads a TableDMS object and converts it to a RequestSchema.
+
+
+    Args:
+        default_space (str): The default space to use when no space is given in an entity.
+        default_version (str): The default version to use when no version is given in an entity.
+        source (TableSource): The source of the table data, used for error reporting.
+
+    Raises:
+        DataModelImportError: If there are any errors in the data model.
+
+    Attributes:
+        errors (list[ModelSyntaxError]): A list of errors encountered during parsing.
+
+    Class Attributes:
+        Sheets: This is used to create error messages. It ensures that the column names matches
+            the names in the table, even if they are renamed in the code.
+        PropertyColumns: This is used to create error messages for the properties table.
+            It ensures that the column names matches the names in the table, even if they are renamed in the code.
+        ContainerColumns: This is used to create error messages for the containers table.
+            It ensures that the column names matches the names in the table, even if they are renamed in the code.
+        ViewColumns: This is used to create error messages for the views table.
+            It ensures that the column names matches the names in the table, even if they are renamed in the code.
+
+    """
+
+    # The following classes are used when creating error messages. They ensure that the column names
+    # matches the names in the table, even if they are renamed in the code.
+    # Note that this is not a complete list of all columns, only those that are used in error messages.
+    class Sheets:
+        metadata = cast(str, TableDMS.model_fields["metadata"].validation_alias)
+        properties = cast(str, TableDMS.model_fields["properties"].validation_alias)
+        containers = cast(str, TableDMS.model_fields["containers"].validation_alias)
+        views = cast(str, TableDMS.model_fields["views"].validation_alias)
+        nodes = cast(str, TableDMS.model_fields["nodes"].validation_alias)
+
+    class PropertyColumns:
+        view = cast(str, DMSProperty.model_fields["view"].validation_alias)
+        view_property = cast(str, DMSProperty.model_fields["view_property"].validation_alias)
+        connection = cast(str, DMSProperty.model_fields["connection"].validation_alias)
+        value_type = cast(str, DMSProperty.model_fields["value_type"].validation_alias)
+        min_count = cast(str, DMSProperty.model_fields["min_count"].validation_alias)
+        max_count = cast(str, DMSProperty.model_fields["max_count"].validation_alias)
+        default = cast(str, DMSProperty.model_fields["default"].validation_alias)
+        auto_increment = cast(str, DMSProperty.model_fields["auto_increment"].validation_alias)
+        container = cast(str, DMSProperty.model_fields["container"].validation_alias)
+        container_property = cast(str, DMSProperty.model_fields["container_property"].validation_alias)
+        container_property_name = cast(str, DMSProperty.model_fields["container_property_name"].validation_alias)
+        container_property_description = cast(
+            str, DMSProperty.model_fields["container_property_description"].validation_alias
+        )
+        index = cast(str, DMSProperty.model_fields["index"].validation_alias)
+        constraint = cast(str, DMSProperty.model_fields["constraint"].validation_alias)
+
+    class ContainerColumns:
+        container = cast(str, DMSContainer.model_fields["container"].validation_alias)
+
+    class ViewColumns:
+        view = cast(str, DMSView.model_fields["view"].validation_alias)
+        filter = cast(str, DMSView.model_fields["filter"].validation_alias)
+
+    def __init__(self, default_space: str, default_version: str, source: TableSource) -> None:
+        self.default_space = default_space
+        self.default_version = default_version
+        self.source = source
+        self.errors: list[ModelSyntaxError] = []
+
+    def read_tables(self, tables: TableDMS) -> RequestSchema:
+        space_request = self.read_space(self.default_space)
+        node_types = self.read_nodes(tables.nodes)
+        enum_collections = self.read_enum_collections(tables.enum)
+        read = self.read_properties(tables.properties, enum_collections)
+        processed = self.process_properties(read)
+        containers = self.read_containers(tables.containers, processed)
+        views, valid_view_entities = self.read_views(tables.views, processed.view)
+        data_model = self.read_data_model(tables, valid_view_entities)
+
+        if self.errors:
+            raise DataModelImportError(self.errors) from None
+        return RequestSchema(
+            dataModel=data_model, views=views, containers=containers, spaces=[space_request], nodeTypes=node_types
+        )
+
+    def read_space(self, space: str) -> SpaceRequest:
+        space_request = self._validate_obj(SpaceRequest, {"space": space}, (self.Sheets.metadata,), field_name="value")
+        if space_request is None:
+            # If space is invalid, we stop parsing to avoid raising an error for every place the space is used.
+            raise DataModelImportError(self.errors) from None
+        return space_request
+
+    def read_nodes(self, nodes: list[DMSNode]) -> list[NodeReference]:
+        node_refs: list[NodeReference] = []
+        for row_no, row in enumerate(nodes):
+            data = self._create_node_ref(row.node)
+            instantiated = self._validate_obj(NodeReference, data, (self.Sheets.nodes, row_no))
+            if instantiated is not None:
+                node_refs.append(instantiated)
+        return node_refs
+
+    @staticmethod
+    def read_enum_collections(enum_rows: list[DMSEnum]) -> dict[str, dict[str, Any]]:
+        enum_collections: dict[str, dict[str, Any]] = defaultdict(dict)
+        for row in enum_rows:
+            enum_collections[row.collection][row.value] = {
+                "name": row.name,
+                "description": row.description,
+            }
+        return enum_collections
+
+    def read_properties(
+        self, properties: list[DMSProperty], enum_collections: dict[str, dict[str, Any]]
+    ) -> ReadProperties:
+        read = ReadProperties()
+        for row_no, prop in enumerate(properties):
+            self._process_view_property(prop, read, row_no)
+            if prop.container is None or prop.container_property is None:
+                # This is when the property is an edge or reverse direct relation property.
+                continue
+            self._process_container_property(prop, read, enum_collections, row_no)
+            self._process_index(prop, read, row_no)
+            self._process_constraint(prop, read, row_no)
+        return read
+
+    def process_properties(self, read: ReadProperties) -> ProcessedProperties:
+        return ProcessedProperties(
+            container=self.create_container_properties(read),
+            view=self.create_view_properties(read),
+            indices=self.create_indices(read),
+            constraints=self.create_constraints(read),
+        )
+
+    def create_container_properties(
+        self, read: ReadProperties
+    ) -> dict[ParsedEntity, dict[str, ContainerPropertyDefinition]]:
+        container_props: dict[ParsedEntity, dict[str, ContainerPropertyDefinition]] = defaultdict(dict)
+        for (container_entity, prop_id), prop_list in read.container.items():
+            if len(prop_list) == 0:
+                # Should not happen
+                continue
+            container_props[container_entity][prop_id] = prop_list[0].container_property
+            if len(prop_list) > 1 and self._are_definitions_different(prop_list):
+                # If multiple view properties are mapping to the same container property,
+                # the container property definitions must be the same.
+                rows_str = humanize_collection(
+                    [self.source.adjust_row_number(self.Sheets.properties, p.row_no) for p in prop_list]
+                )
+                container_columns_str = humanize_collection(
+                    [
+                        self.PropertyColumns.connection,
+                        self.PropertyColumns.value_type,
+                        self.PropertyColumns.min_count,
+                        self.PropertyColumns.max_count,
+                        self.PropertyColumns.default,
+                        self.PropertyColumns.auto_increment,
+                        self.PropertyColumns.container_property_name,
+                        self.PropertyColumns.container_property_description,
+                        self.PropertyColumns.index,
+                        self.PropertyColumns.constraint,
+                    ]
+                )
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In {self.source.location((self.Sheets.properties,))} "
+                            f"when the column {self.PropertyColumns.container!r} and "
+                            f"{self.PropertyColumns.container_property!r} are the same, "
+                            f"all the container columns ({container_columns_str}) must be the same. "
+                            f"Inconsistent definitions for container '{container_entity!s} "
+                            f"and {prop_id!r}' found in rows {rows_str}."
+                        )
+                    )
+                )
+        return container_props
+
+    def _are_definitions_different(self, prop_list: list[ReadContainerProperty]) -> bool:
+        if len(prop_list) < 2:
+            return False
+        first_def = prop_list[0].container_property
+        for prop in prop_list[1:]:
+            if first_def != prop.container_property:
+                return True
+        return False
+
+    def create_view_properties(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, ViewRequestProperty]]:
+        view_props: dict[ParsedEntity, dict[str, ViewRequestProperty]] = defaultdict(dict)
+        for (view_entity, prop_id), prop_list in read.view.items():
+            if len(prop_list) == 0:
+                # Should not happen
+                continue
+            view_props[view_entity][prop_id] = prop_list[0].view_property
+            if len(prop_list) > 1:
+                # Safeguard against duplicated rows for view properties.
+                rows_str = humanize_collection(
+                    [self.source.adjust_row_number(self.Sheets.properties, p.row_no) for p in prop_list]
+                )
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In {self.source.location((self.Sheets.properties,))} the combination of columns "
+                            f"{self.PropertyColumns.view!r} and {self.PropertyColumns.view_property!r} must be unique. "
+                            f"Duplicated entries for view '{view_entity!s}' and "
+                            f"property '{prop_id!s}' found in rows {rows_str}."
+                        )
+                    )
+                )
+
+        return view_props
+
+    def create_indices(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, Index]]:
+        indices: dict[ParsedEntity, dict[str, Index]] = defaultdict(dict)
+        for (container_entity, index_id), index_list in read.indices.items():
+            if len(index_list) == 0:
+                continue
+            index = index_list[0].index
+            if len(index_list) == 1:
+                indices[container_entity][index_id] = index
+                continue
+            if missing_order := [idx for idx in index_list if idx.order is None]:
+                row_str = humanize_collection(
+                    [self.source.adjust_row_number(self.Sheets.properties, idx.row_no) for idx in missing_order]
+                )
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In table {self.Sheets.properties!r} column {self.PropertyColumns.index!r}: "
+                            f"the index {index_id!r} on container {container_entity!s} is defined on multiple "
+                            f"properties. This requires the 'order' attribute to be set. It is missing in rows "
+                            f"{row_str}."
+                        )
+                    )
+                )
+                continue
+            index.properties = [idx.prop_id for idx in sorted(index_list, key=lambda x: x.order or 999)]
+            indices[container_entity][index_id] = index
+        return indices
+
+    def create_constraints(self, read: ReadProperties) -> dict[ParsedEntity, dict[str, Constraint]]:
+        constraints: dict[ParsedEntity, dict[str, Constraint]] = defaultdict(dict)
+        for (container_entity, constraint_id), constraint_list in read.constraints.items():
+            if len(constraint_list) == 0:
+                continue
+            constraint = constraint_list[0].constraint
+            if len(constraint_list) == 1 or not isinstance(constraint, UniquenessConstraintDefinition):
+                constraints[container_entity][constraint_id] = constraint
+                continue
+            if missing_order := [c for c in constraint_list if c.order is None]:
+                row_str = humanize_collection(
+                    [self.source.adjust_row_number(self.Sheets.properties, c.row_no) for c in missing_order]
+                )
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In table {self.Sheets.properties!r} column {self.PropertyColumns.constraint!r}: "
+                            f"the uniqueness constraint {constraint_id!r} on container {container_entity!s} is defined "
+                            f"on multiple properties. This requires the 'order' attribute to be set. It is missing in "
+                            f"rows {row_str}."
+                        )
+                    )
+                )
+                continue
+            constraint.properties = [c.prop_id for c in sorted(constraint_list, key=lambda x: x.order or 999)]
+            constraints[container_entity][constraint_id] = constraint
+        return constraints
+
+    def _process_view_property(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
+        loc = (self.Sheets.properties, row_no)
+        data = self.read_view_property(prop, loc)
+        view_prop = self._validate_adapter(ViewRequestPropertyAdapter, data, loc)
+        if view_prop is not None:
+            read.view[(prop.view, prop.view_property)].append(
+                # MyPy has a very strange complaint here. It complains that given type is not expected type,
+                # even though they are exactly the same.
+                ReadViewProperty(prop.container_property, row_no, view_prop)  # type: ignore[arg-type]
+            )
+        return None
+
+    def _process_container_property(
+        self, prop: DMSProperty, read: ReadProperties, enum_collections: dict[str, dict[str, Any]], row_no: int
+    ) -> None:
+        loc = (self.Sheets.properties, row_no)
+        data = self.read_container_property(prop, enum_collections, loc=loc)
+        container_prop = self._validate_obj(ContainerPropertyDefinition, data, loc)
+        if container_prop is not None and prop.container and prop.container_property:
+            read.container[(prop.container, prop.container_property)].append(
+                ReadContainerProperty(prop.container_property, row_no, container_prop)
+            )
+        return None
+
+    def _process_index(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
+        if prop.index is None or prop.container_property is None or prop.container is None:
+            return
+
+        loc = (self.Sheets.properties, row_no, self.PropertyColumns.index)
+        for index in prop.index:
+            data = self.read_index(index, prop.container_property)
+            created = self._validate_adapter(IndexAdapter, data, loc)
+            if created is None:
+                continue
+            order = self._read_order(index.properties, loc)
+            read.indices[(prop.container, index.suffix)].append(
+                ReadIndex(
+                    prop_id=prop.container_property, order=order, row_no=row_no, index_id=index.suffix, index=created
+                )
+            )
+
+    def _read_order(self, properties: dict[str, Any], loc: tuple[str | int, ...]) -> int | None:
+        if "order" not in properties:
+            return None
+        try:
+            return int(properties["order"])
+        except ValueError:
+            self.errors.append(
+                ModelSyntaxError(
+                    message=f"In {self.source.location(loc)} invalid order value '{properties['order']}'. "
+                    "Must be an integer."
+                )
+            )
+            return None
+
+    @staticmethod
+    def read_index(index: ParsedEntity, prop_id: str) -> dict[str, Any]:
+        return {
+            "indexType": index.prefix,
+            "properties": [prop_id],
+            **index.properties,
+        }
+
+    def _process_constraint(self, prop: DMSProperty, read: ReadProperties, row_no: int) -> None:
+        if prop.constraint is None or prop.container_property is None or prop.container is None:
+            return
+        loc = (self.Sheets.properties, row_no, self.PropertyColumns.constraint)
+        for constraint in prop.constraint:
+            data = self.read_constraint(constraint, prop.container_property)
+            created = self._validate_adapter(ConstraintAdapter, data, loc)
+            if created is None:
+                continue
+            order = self._read_order(constraint.properties, loc)
+            read.constraints[(prop.container, constraint.suffix)].append(
+                ReadConstraint(
+                    prop_id=prop.container_property,
+                    order=order,
+                    constraint_id=constraint.suffix,
+                    row_no=row_no,
+                    constraint=created,
+                )
+            )
+
+    @staticmethod
+    def read_constraint(constraint: ParsedEntity, prop_id: str) -> dict[str, Any]:
+        return {"constraintType": constraint.prefix, "properties": [prop_id], **constraint.properties}
+
+    def read_view_property(self, prop: DMSProperty, loc: tuple[str | int, ...]) -> dict[str, Any]:
+        """Reads a single view property from a given row in the properties table.
+
+        The type of property (core, edge, reverse direct relation) is determined based on the connection column
+        as follows:
+        1. If the connection is empty or 'direct' it is a core property.
+        2. If the connection is 'edge' it is an edge property.
+        3. If the connection is 'reverse' it is a reverse direct relation property
+        4. Otherwise, it is an error.
+
+        Args:
+            prop (DMSProperty): The property row to read.
+            loc (tuple[str | int, ...]): The location of the property in the source for error reporting.
+
+        Returns:
+            ViewRequestProperty: The parsed view property.
+        """
+
+        if prop.connection is None or prop.connection.suffix == "direct":
+            return self.read_core_view_property(prop)
+        elif prop.connection.suffix == "edge":
+            return self.read_edge_view_property(prop, loc)
+        elif prop.connection.suffix == "reverse":
+            return self.read_reverse_direct_relation_view_property(prop)
+        else:
+            self.errors.append(
+                ModelSyntaxError(
+                    message=f"In {self.source.location(loc)} invalid connection type '{prop.connection.suffix}'. "
+                )
+            )
+            return {}
+
+    def read_core_view_property(self, prop: DMSProperty) -> dict[str, Any]:
+        return dict(
+            connectionType="primary_property",
+            name=prop.name,
+            description=prop.description,
+            container=self._create_container_ref(prop.container),
+            containerPropertyIdentifier=prop.container_property,
+            source=None if prop.connection is None else self._create_view_ref(prop.value_type),
+        )
+
+    def read_edge_view_property(self, prop: DMSProperty, loc: tuple[str | int, ...]) -> dict[str, Any]:
+        if prop.connection is None:
+            return {}
+        edge_source: dict[str, str | None] | None = None
+        if "edgeSource" in prop.connection.properties:
+            edge_source = self._create_view_ref_unparsed(
+                prop.connection.properties["edgeSource"], (*loc, self.PropertyColumns.connection, "edgeSource")
+            )
+        return dict(
+            connectionType="single_edge_connection" if prop.max_count == 1 else "multi_edge_connection",
+            name=prop.name,
+            description=prop.description,
+            source=self._create_view_ref(prop.value_type),
+            type=self._create_node_ref_unparsed(
+                prop.connection.properties.get("type"),
+                prop.view,
+                prop.view_property,
+                (*loc, self.PropertyColumns.connection, "type"),
+            ),
+            edgeSource=edge_source,
+            direction=prop.connection.properties.get("direction", "outwards"),
+        )
+
+    def read_reverse_direct_relation_view_property(
+        self,
+        prop: DMSProperty,
+    ) -> dict[str, Any]:
+        if prop.connection is None:
+            return {}
+        view_ref = self._create_view_ref(prop.value_type)
+        return dict(
+            connectionType="single_reverse_direct_relation" if prop.max_count == 1 else "multi_reverse_direct_relation",
+            name=prop.name,
+            description=prop.description,
+            source=view_ref,
+            through={
+                "source": view_ref,
+                "identifier": prop.connection.properties.get("property"),
+            },
+        )
+
+    def read_container_property(
+        self, prop: DMSProperty, enum_collections: dict[str, dict[str, Any]], loc: tuple[str | int, ...]
+    ) -> dict[str, Any]:
+        data_type = self._read_data_type(prop, enum_collections, loc)
+        return dict(
+            immutable=prop.immutable,
+            nullable=prop.min_count == 0 or prop.min_count is None,
+            autoIncrement=prop.auto_increment,
+            defaultValue=prop.default,
+            description=prop.container_property_description,
+            name=prop.container_property_name,
+            type=data_type,
+        )
+
+    def _read_data_type(
+        self, prop: DMSProperty, enum_collections: dict[str, dict[str, Any]], loc: tuple[str | int, ...]
+    ) -> dict[str, Any]:
+        # Implementation to read the container property type from DMSProperty
+        is_list = None if prop.max_count is None else prop.max_count > 1
+        max_list_size: int | None = None
+        if is_list and prop.max_count is not None:
+            max_list_size = prop.max_count
+
+        args: dict[str, Any] = {
+            "maxListSize": max_list_size,
+            "list": is_list,
+            "type": "direct" if prop.connection is not None else prop.value_type.suffix,
+        }
+        args.update(prop.value_type.properties)
+        if "container" in args and prop.connection is not None:
+            # Direct relation constraint.
+            args["container"] = self._create_container_ref_unparsed(
+                prop.connection.properties["container"], (*loc, self.PropertyColumns.connection, "container")
+            )
+        if args["type"] == "enum" and "collection" in prop.value_type.properties:
+            args["values"] = enum_collections.get(prop.value_type.properties["collection"], {})
+        return args
+
+    def read_containers(
+        self, containers: list[DMSContainer], properties: ProcessedProperties
+    ) -> list[ContainerRequest]:
+        # Implementation to read containers from DMSContainer list
+        containers_requests: list[ContainerRequest] = []
+        rows_by_seen: dict[ParsedEntity, list[int]] = defaultdict(list)
+        for row_no, container in enumerate(containers):
+            container_request = self._validate_obj(
+                ContainerRequest,
+                dict(
+                    **self._create_container_ref(container.container),
+                    usedFor=container.used_for,
+                    name=container.name,
+                    description=container.description,
+                    properties=properties.container[container.container],
+                    indexes=properties.indices.get(container.container),
+                    constraints=properties.constraints.get(container.container),
+                ),
+                (self.Sheets.containers, row_no),
+            )
+            if container_request is None:
+                continue
+            if container.container in rows_by_seen:
+                rows_by_seen[container.container].append(row_no)
+            else:
+                containers_requests.append(container_request)
+                rows_by_seen[container.container] = [row_no]
+        for entity, rows in rows_by_seen.items():
+            if len(rows) > 1:
+                rows_str = humanize_collection([self.source.adjust_row_number(self.Sheets.containers, r) for r in rows])
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In {self.source.location((self.Sheets.containers,))} the values in "
+                            f"column {self.ContainerColumns.container!r} must be unique. "
+                            f"Duplicated entries for container '{entity!s}' found in rows {rows_str}."
+                        )
+                    )
+                )
+        return containers_requests
+
+    def read_views(
+        self,
+        views: list[DMSView],
+        properties: dict[ParsedEntity, dict[str, ViewRequestProperty]],
+    ) -> tuple[list[ViewRequest], set[ParsedEntity]]:
+        views_requests: list[ViewRequest] = []
+        rows_by_seen: dict[ParsedEntity, list[int]] = defaultdict(list)
+        for row_no, view in enumerate(views):
+            filter_dict: dict[str, Any] | None = None
+            if view.filter is not None:
+                try:
+                    filter_dict = json.loads(view.filter)
+                except ValueError as e:
+                    self.errors.append(
+                        ModelSyntaxError(
+                            message=(
+                                f"In {self.source.location((self.Sheets.views, row_no, self.ViewColumns.filter))} "
+                                f"must be valid json. Got error {e!s}"
+                            )
+                        )
+                    )
+            view_request = self._validate_obj(
+                ViewRequest,
+                dict(
+                    **self._create_view_ref(view.view),
+                    name=view.name,
+                    description=view.description,
+                    implements=[self._create_view_ref(impl) for impl in view.implements] if view.implements else None,
+                    filter=filter_dict,
+                    properties=properties.get(view.view, {}),
+                ),
+                (self.Sheets.views, row_no),
+            )
+            if view_request is None:
+                continue
+            if view.view in rows_by_seen:
+                rows_by_seen[view.view].append(row_no)
+            else:
+                views_requests.append(view_request)
+                rows_by_seen[view.view] = [row_no]
+        for entity, rows in rows_by_seen.items():
+            if len(rows) > 1:
+                rows_str = humanize_collection([self.source.adjust_row_number(self.Sheets.views, r) for r in rows])
+                self.errors.append(
+                    ModelSyntaxError(
+                        message=(
+                            f"In {self.source.location((self.Sheets.views,))} the values in "
+                            f"column {self.ViewColumns.view!r} must be unique. "
+                            f"Duplicated entries for view '{entity!s}' found in rows {rows_str}."
+                        )
+                    )
+                )
+        return views_requests, set(rows_by_seen.keys())
+
+    def read_data_model(self, tables: TableDMS, valid_view_entities: set[ParsedEntity]) -> DataModelRequest:
+        data = {
+            **{meta.key: meta.value for meta in tables.metadata},
+            "views": [
+                self._create_view_ref(view.view)
+                for view in tables.views
+                if view.in_model is not False and view.view in valid_view_entities
+            ],
+        }
+        model = self._validate_obj(DataModelRequest, data, (self.Sheets.metadata,), field_name="value")
+        if model is None:
+            # This is the last step, so we can raise the error here.
+            raise DataModelImportError(self.errors) from None
+        return model
+
+    def _parse_entity(self, entity: str, loc: tuple[str | int, ...]) -> ParsedEntity | None:
+        try:
+            parsed = parse_entity(entity)
+        except ValueError as e:
+            self.errors.append(
+                ModelSyntaxError(message=f"In {self.source.location(loc)} failed to parse entity '{entity}': {e!s}")
+            )
+            return None
+        return parsed
+
+    def _create_view_ref_unparsed(self, entity: str, loc: tuple[str | int, ...]) -> dict[str, str | None]:
+        parsed = self._parse_entity(entity, loc)
+        if parsed is None:
+            return dict()
+        return self._create_view_ref(parsed)
+
+    def _create_view_ref(self, entity: ParsedEntity | None) -> dict[str, str | None]:
+        if entity is None or entity.suffix == "":
+            # If no suffix is given, we cannot create a valid reference.
+            return dict()
+        space, version = entity.prefix, entity.properties.get("version")
+        if space == "":
+            space = self.default_space
+            # Only if default space is used, we can use default version.
+            if version is None:
+                version = self.default_version
+        return {
+            "space": space,
+            "externalId": entity.suffix,
+            "version": version,
+        }
+
+    def _create_container_ref_unparsed(self, entity: str, loc: tuple[str | int, ...]) -> dict[str, str]:
+        parsed = self._parse_entity(entity, loc)
+        if parsed is None:
+            return dict()
+        return self._create_container_ref(parsed)
+
+    def _create_container_ref(self, entity: ParsedEntity | None) -> dict[str, str]:
+        if entity is None or entity.suffix == "":
+            # If no suffix is given, we cannot create a valid reference.
+            return dict()
+        return {
+            "space": entity.prefix or self.default_space,
+            "externalId": entity.suffix,
+        }
+
+    def _create_node_ref_unparsed(
+        self, entity: str | None, view: ParsedEntity, view_prop: str, loc: tuple[str | int, ...]
+    ) -> dict[str, str | None]:
+        if entity is None:
+            # Use default
+            return self._create_node_ref(None, view, view_prop)
+        parsed = self._parse_entity(entity, loc)
+        if parsed is None:
+            return dict()
+        return self._create_node_ref(parsed, view, view_prop)
+
+    @overload
+    def _create_node_ref(
+        self, entity: ParsedEntity, *, view: None = None, view_prop: None = None
+    ) -> dict[str, str | None]: ...
+
+    @overload
+    def _create_node_ref(
+        self, entity: ParsedEntity | None, view: ParsedEntity, view_prop: str
+    ) -> dict[str, str | None]: ...
+
+    def _create_node_ref(
+        self, entity: ParsedEntity | None, view: ParsedEntity | None = None, view_prop: str | None = None
+    ) -> dict[str, str | None]:
+        if entity is None or entity.suffix == "":
+            if view is None or view_prop is None:
+                return dict()
+            # If no suffix is given, we fallback to the view's property
+            return {
+                "space": view.prefix or self.default_space,
+                "externalId": f"{view.suffix}.{view_prop}",
+            }
+        return {
+            "space": entity.prefix or self.default_space,
+            "externalId": entity.suffix,
+        }
+
+    def _validate_obj(
+        self,
+        obj: type[T_BaseModel],
+        data: dict,
+        parent_loc: tuple[str | int, ...],
+        field_name: Literal["field", "column", "value"] = "column",
+    ) -> T_BaseModel | None:
+        try:
+            return obj.model_validate(data)
+        except ValidationError as e:
+            self._add_error_messages(e, parent_loc, field_name=field_name)
+            return None
+
+    def _validate_adapter(
+        self, adapter: TypeAdapter[T_BaseModel], data: dict[str, Any], parent_loc: tuple[str | int, ...]
+    ) -> T_BaseModel | None:
+        try:
+            return adapter.validate_python(data, strict=True)
+        except ValidationError as e:
+            self._add_error_messages(e, parent_loc, field_name="column")
+            return None
+
+    def _add_error_messages(
+        self,
+        error: ValidationError,
+        parent_loc: tuple[str | int, ...],
+        field_name: Literal["field", "column", "value"] = "column",
+    ) -> None:
+        seen: set[str] = set()
+        for message in humanize_validation_error(
+            error,
+            parent_loc=parent_loc,
+            humanize_location=self.source.location,
+            field_name=field_name,
+            field_renaming=self.source.field_mapping(parent_loc[0]),
+            missing_required_descriptor="empty" if field_name == "column" else "missing",
+        ):
+            if message in seen:
+                continue
+            seen.add(message)
+            self.errors.append(ModelSyntaxError(message=message))
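One rule in the new reader is worth calling out: when the same index or uniqueness-constraint identifier is spread over several property rows, every row must carry an 'order' value, and the reader sorts the rows by it to build the final property list (see create_indices and create_constraints above). A standalone illustration of that rule, independent of the reader's own classes:

# Standalone illustration of the ordering rule; Row is a stand-in, not a neat class.
from dataclasses import dataclass


@dataclass
class Row:
    prop_id: str
    order: int | None


def merge_ordered(rows: list[Row]) -> list[str]:
    if len(rows) > 1 and any(r.order is None for r in rows):
        raise ValueError("'order' is required when one identifier spans multiple properties")
    return [r.prop_id for r in sorted(rows, key=lambda r: r.order or 999)]


print(merge_ordered([Row("name", 2), Row("site", 1)]))  # ['site', 'name']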
cognite/neat/_data_model/models/dms/__init__.py
CHANGED
@@ -60,6 +60,7 @@ from ._view_property import (
     ViewCorePropertyResponse,
     ViewPropertyDefinition,
     ViewRequestProperty,
+    ViewRequestPropertyAdapter,
     ViewResponseProperty,
 )
 from ._views import (
@@ -135,6 +136,7 @@ __all__ = [
     "ViewReference",
     "ViewRequest",
     "ViewRequestProperty",
+    "ViewRequestPropertyAdapter",
     "ViewResponse",
     "ViewResponseProperty",
     "WriteableResource",
cognite/neat/_utils/useful_types.py
CHANGED
@@ -9,3 +9,6 @@ T_ID = TypeVar("T_ID", bound=Hashable)

 # These are the types that openpyxl supports in cells
 CellValueType: TypeAlias = str | int | float | bool | datetime | date | time | timedelta | None
+
+# The format expected for excel sheets representing a data model
+DataModelTableType: TypeAlias = dict[str, list[dict[str, CellValueType]]]
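A value of the new alias is simply a mapping from table name to row dictionaries; a minimal sketch with placeholder table and column names:

from cognite.neat._utils.useful_types import DataModelTableType

tables: DataModelTableType = {
    "Metadata": [{"Key": "space", "Value": "my_space"}],
    "Views": [{"View": "Pump", "Name": None}],
}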
cognite/neat/_version.py
CHANGED
@@ -1,2 +1,2 @@
-__version__ = "0.
+__version__ = "0.126.0"
 __engine__ = "^2.0.4"
cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py
CHANGED
@@ -377,7 +377,7 @@ def _replace_legacy_constraint_form(sheet: Worksheet) -> None:
     constraints = []
     for constraint in SPLIT_ON_COMMA_PATTERN.split(str(cell.value)):
         # latest format, do nothing
-        if "
+        if "require" in constraint.lower():
             constraints.append(constraint)
             continue

@@ -385,7 +385,7 @@ def _replace_legacy_constraint_form(sheet: Worksheet) -> None:
             container = ContainerEntity.load(constraint, space="default")
             container_str = container.external_id if container.space == "default" else str(container)
             constraints.append(
-                f"requires:{container.space}_{container.external_id}(
+                f"requires:{container.space}_{container.external_id}(require={container_str})"
             )
             replaced = True
         except ValidationError:
cognite/neat/v0/core/_data_model/models/entities/_single_value.py
CHANGED
@@ -666,7 +666,7 @@ class ContainerConstraintEntity(PhysicalEntity[None]):
     type_: ClassVar[EntityTypes] = EntityTypes.container_constraint
     prefix: _UndefinedType | Literal["uniqueness", "requires"] = Undefined  # type: ignore[assignment]
     suffix: str
-
+    require: ContainerEntity | None = None

     def as_id(self) -> None:
         return None
cognite/neat/v0/core/_data_model/models/physical/_unverified.py
CHANGED
@@ -285,7 +285,7 @@ class UnverifiedPhysicalContainer(UnverifiedComponent[PhysicalContainer]):
         for constraint_name, constraint_obj in (container.constraints or {}).items():
             if isinstance(constraint_obj, dm.RequiresConstraint):
                 constraint = ContainerConstraintEntity(
-                    prefix="requires", suffix=constraint_name,
+                    prefix="requires", suffix=constraint_name, require=ContainerEntity.from_id(constraint_obj.require)
                 )
                 constraints.append(str(constraint))

cognite/neat/v0/core/_data_model/models/physical/_validation.py
CHANGED
@@ -123,8 +123,8 @@ class PhysicalValidation:

         for container in self._containers or []:
             for constraint in container.constraint or []:
-                if constraint.
-                    imported_containers.add(cast(ContainerEntity, constraint.
+                if constraint.require not in existing_containers:
+                    imported_containers.add(cast(ContainerEntity, constraint.require))

         if include_views_with_no_properties:
             extra_views = existing_views - view_with_properties
cognite/neat/v0/core/_data_model/models/physical/_verified.py
CHANGED
@@ -419,7 +419,7 @@ class PhysicalContainer(SheetRow):
                 message = f"Constraint id '{constraint.suffix}' exceeds maximum length of {CONSTRAINT_ID_MAX_LENGTH}."
                 raise ValueError(message) from None

-            if constraint.
+            if constraint.require is None:
                 message = (
                     f"Container constraint must have a container set. "
                     f"Please set 'requires:{constraint!s}(container=space:external_id)'."
@@ -435,9 +435,9 @@ class PhysicalContainer(SheetRow):
         container_id = self.container.as_id()
         constraints: dict[str, dm.Constraint] = {}
         for constraint in self.constraint or []:
-            if constraint.
+            if constraint.require is None:
                 continue
-            requires = dm.RequiresConstraint(constraint.
+            requires = dm.RequiresConstraint(constraint.require.as_id())
             constraints[constraint.suffix] = requires

         return dm.ContainerApply(
cognite/neat/v0/core/_data_model/transformers/_converters.py
CHANGED
@@ -1650,7 +1650,7 @@ class _ConceptualDataModelConverter:
                 ContainerConstraintEntity(
                     prefix="requires",
                     suffix=f"{parent_entity.space}_{parent_entity.external_id}"[:CONSTRAINT_ID_MAX_LENGTH],
-
+                    require=parent_entity,
                 )
             )
         return constrains
cognite/neat/v0/core/_instances/queries/_select.py
CHANGED
@@ -451,9 +451,21 @@ class SelectQueries(BaseQuery):
             else:
                 yield instance_id, str(space)

-    def
+    def get_graph_diff(
         self, source_graph: URIRef, target_graph: URIRef
     ) -> Iterable[tuple[URIRef, URIRef, URIRef | RdfLiteral]]:
+        """Return triples that exist in the source graph but not in the target graph.
+
+        This method compares two named graphs within the dataset and identifies all triples
+        that are present in the `source_graph` but are missing from the `target_graph`.
+
+        Args:
+            source_graph: URI of the graph to compare from.
+            target_graph: URI of the graph to compare against.
+
+        Returns:
+            Iterable of triples (subject, predicate, object)
+        """
         query = f"""
         SELECT ?s ?p ?o
         WHERE {{
@@ -464,31 +476,3 @@ class SelectQueries(BaseQuery):
         }}
         """
         return cast(Iterable[tuple[URIRef, URIRef, URIRef | RdfLiteral]], self.dataset.query(query))
-
-    def get_triples_to_delete(
-        self, old_graph: URIRef, new_graph: URIRef
-    ) -> Iterable[tuple[URIRef, URIRef, URIRef | RdfLiteral]]:
-        """Find triples that exist in old graph but not in new graph.
-
-        Args:
-            old_graph: URI of the old named graph
-            new_graph: URI of the new named graph
-
-        Returns:
-            List of triples (subject, predicate, object) to delete
-        """
-        return self._get_graph_diff(old_graph, new_graph)
-
-    def get_triples_to_add(
-        self, old_graph: URIRef, new_graph: URIRef
-    ) -> Iterable[tuple[URIRef, URIRef, URIRef | RdfLiteral]]:
-        """Find triples that exist in new graph but not in old graph.
-
-        Args:
-            old_graph: URI of the old named graph
-            new_graph: URI of the new named graph
-
-        Returns:
-            List of triples (subject, predicate, object) to add
-        """
-        return self._get_graph_diff(new_graph, old_graph)
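The two direction-specific helpers are gone; both directions are now expressed through get_graph_diff by swapping the argument order, which is exactly how the instance store below uses it. A hedged sketch of the equivalence (assuming URIRef comes from rdflib):

from rdflib import URIRef


def triples_to_delete(select_queries, old_graph: URIRef, new_graph: URIRef):
    # Previously: select_queries.get_triples_to_delete(old_graph, new_graph)
    return select_queries.get_graph_diff(old_graph, new_graph)


def triples_to_add(select_queries, old_graph: URIRef, new_graph: URIRef):
    # Previously: select_queries.get_triples_to_add(old_graph, new_graph)
    return select_queries.get_graph_diff(new_graph, old_graph)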
cognite/neat/v0/core/_store/_instance.py
CHANGED
@@ -476,10 +476,10 @@ class NeatInstanceStore:

         # Store new diff results
         self._add_triples(
-            self.queries.select.
+            self.queries.select.get_graph_diff(new_named_graph, current_named_graph),
             named_graph=NAMED_GRAPH_NAMESPACE["DIFF_ADD"],
         )
         self._add_triples(
-            self.queries.select.
+            self.queries.select.get_graph_diff(current_named_graph, new_named_graph),
            named_graph=NAMED_GRAPH_NAMESPACE["DIFF_DELETE"],
         )
@@ -1,16 +1,20 @@
|
|
|
1
1
|
cognite/neat/__init__.py,sha256=Lo4DbjDOwnhCYUoAgPp5RG1fDdF7OlnomalTe7n1ydw,211
|
|
2
2
|
cognite/neat/_exceptions.py,sha256=IvDKO8kHk4dCkrmQU6z_svJcPqs2m7QYLosuOuM_iwE,473
|
|
3
3
|
cognite/neat/_issues.py,sha256=uv0fkkWwTKqNmTmHqyoBB3L6yMCh42EZpEkLGmIJYOY,812
|
|
4
|
-
cognite/neat/_version.py,sha256=
|
|
4
|
+
cognite/neat/_version.py,sha256=kvjU7yRcXW905fNDDrk9Rmv1eMH9zdS38_vx_BqoxPA,46
|
|
5
5
|
cognite/neat/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
6
|
cognite/neat/_data_model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
7
7
|
cognite/neat/_data_model/_constants.py,sha256=NGGvWHlQqhkkSBP_AqoofGYjNph3SiZX6QPINlMsy04,107
|
|
8
8
|
cognite/neat/_data_model/_identifiers.py,sha256=a0LcQ_h0NffxSKTCrzCDpYkrlaUTk-D_rfaQUh-BhWc,1921
|
|
9
|
+
cognite/neat/_data_model/exporters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
10
|
+
cognite/neat/_data_model/exporters/_base.py,sha256=sCBa02bW5HU9MHIGcoCHq_xyaQ6jlbtGnk2tor3wmm0,430
|
|
11
|
+
cognite/neat/_data_model/exporters/_table_exporter.py,sha256=_0-GABdkqWKimUByT63H_dXNI2Uxuc1zOvvThP3lpko,1212
|
|
9
12
|
cognite/neat/_data_model/importers/__init__.py,sha256=Ntk6Z8jpPvr7awV4kgs-uMRB2RrA3ufi0O7sSMgcv0o,133
|
|
10
13
|
cognite/neat/_data_model/importers/_base.py,sha256=NRB0FcEBj4GaethU68nRffBfTedBBA866A3zfJNfmiQ,433
|
|
11
14
|
cognite/neat/_data_model/importers/_table_importer/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
12
15
|
cognite/neat/_data_model/importers/_table_importer/data_classes.py,sha256=c_P4j7cHTyltNRiNzJJkJ-TKM5uoiizyLI6cgvLA3nM,4057
|
|
13
|
-
cognite/neat/_data_model/importers/_table_importer/importer.py,sha256=
|
|
16
|
+
cognite/neat/_data_model/importers/_table_importer/importer.py,sha256=pkE7OBe5qQ1Rs0wEk5DEUuttmCp8zKJ0U_Nu9GP0rPk,4559
|
|
17
|
+
cognite/neat/_data_model/importers/_table_importer/reader.py,sha256=U7hDYAk1bQq4xTj3na7n1G9qllp522hWzojXAGQ9FUw,36876
|
|
14
18
|
cognite/neat/_data_model/importers/_table_importer/source.py,sha256=CU6kHJquKxDgYSMwONALbl6yleF_FoAgGU2SXl_uj0Y,3098
|
|
15
19
|
cognite/neat/_data_model/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
16
20
|
cognite/neat/_data_model/models/conceptual/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -19,7 +23,7 @@ cognite/neat/_data_model/models/conceptual/_concept.py,sha256=0Pk4W2TJ_Y0Z7oPHpz
|
|
|
19
23
|
cognite/neat/_data_model/models/conceptual/_data_model.py,sha256=mSX0z8i29ufcRUhvC_NPeo2xGidlK3B1n89kngY_SqQ,1695
|
|
20
24
|
cognite/neat/_data_model/models/conceptual/_properties.py,sha256=CpF37vJYBTLT4DH4ZOu2U-JyWtkb_27V8fw52qiaE_k,4007
|
|
21
25
|
cognite/neat/_data_model/models/conceptual/_property.py,sha256=blSZQxX52zaILAtjUkldPzPeysz7wnG-UGSNU5tacI8,4138
|
|
22
|
-
cognite/neat/_data_model/models/dms/__init__.py,sha256=
|
|
26
|
+
cognite/neat/_data_model/models/dms/__init__.py,sha256=du7UBnz1BxvAHFgpHG-aNkZXfz6NXRmuptpCv27LVtk,3895
|
|
23
27
|
cognite/neat/_data_model/models/dms/_base.py,sha256=F49CLKUmtjTaaW0NrQbjqGkpWNgnZMV8m8uomCU_FJU,719
|
|
24
28
|
cognite/neat/_data_model/models/dms/_constants.py,sha256=wBkLjAPwufPc2naxOfPA1XC0CM2RbDbo6Dpiv9dPrew,1344
|
|
25
29
|
cognite/neat/_data_model/models/dms/_constraints.py,sha256=QxcsHzdaYe3E5_xTIdtQDfkLPOzKa2c-0D02QKLto9o,1064
|
|
@@ -47,7 +51,7 @@ cognite/neat/_session/_state_machine/_states.py,sha256=RX6C0YNNT9mX0C6c-ZTiUVh6p
  cognite/neat/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  cognite/neat/_utils/auxiliary.py,sha256=Cx-LP8dfN782R3iUcm--q26zdzQ0k_RFnVbJ0bwVZMI,1345
  cognite/neat/_utils/text.py,sha256=tDspKjU4AkzB6P1kKPNaVpx-yvx3V2uQVC3FAr6nibM,1835
- cognite/neat/_utils/useful_types.py,sha256=
+ cognite/neat/_utils/useful_types.py,sha256=iCpT4vrn95Y4ATmyOI_eHMm7Lb-h1CeO5fVuWOVUgDQ,555
  cognite/neat/_utils/validation.py,sha256=vS7GAvMMeco1jUaEs_Rz2pUoz28uee6xU4yt8CrCzE4,5430
  cognite/neat/_utils/http_client/__init__.py,sha256=gJBrOH1tIzEzLforHbeakYimTn4RlelyANps-jtpREI,894
  cognite/neat/_utils/http_client/_client.py,sha256=2RVwTbbPFlQ8eJVLKNUXwnc4Yq_783PkY44zwr6LlT8,11509
@@ -94,7 +98,7 @@ cognite/neat/v0/core/_data_model/importers/_base_file_reader.py,sha256=OjjXb0IOl
  cognite/neat/v0/core/_data_model/importers/_dict2data_model.py,sha256=CeDZNAO4W5JZmrboYtTfHnuU1UoXRI2nOUaasWmxfqM,4720
  cognite/neat/v0/core/_data_model/importers/_dms2data_model.py,sha256=yXQucBtmJLzEHKly2RcOB-g2iiAyM_h4jFbVT-38gHM,29557
  cognite/neat/v0/core/_data_model/importers/_graph2data_model.py,sha256=ILgrhror8PUpwKT9Q9Y8XO_rLSe4qPV4v7jvltjhzeM,13720
- cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py,sha256=
+ cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py,sha256=ksD_AnFngPNTNOzQdguoV2MNcIJXBVwTkKJKmKOGm7s,15433
  cognite/neat/v0/core/_data_model/importers/_rdf/__init__.py,sha256=1yOjV2PKCxwH7uCTXVZhSdxtn5etmFX40cksvwtKcZ8,199
  cognite/neat/v0/core/_data_model/importers/_rdf/_base.py,sha256=Rv24TQQDZqAuFD8Qh0yRBiB7-JvoMVj3mGoPBm-xBFs,6052
  cognite/neat/v0/core/_data_model/importers/_rdf/_inference2rdata_model.py,sha256=w8LsdkzgoTXN3LZyjEL_5grWZGlf7PPj8qOu370_K6M,28976
@@ -115,7 +119,7 @@ cognite/neat/v0/core/_data_model/models/entities/_constants.py,sha256=GXRzVfArwx
  cognite/neat/v0/core/_data_model/models/entities/_loaders.py,sha256=6UD1ik4juMh-yW9cEEnXuheV1vwopdM6d_BBpyUOUUY,5709
  cognite/neat/v0/core/_data_model/models/entities/_multi_value.py,sha256=lkQ95o3Tvf7QImDq8d6KYfgYyq4_zQCqVBAZDOdK2TY,2798
  cognite/neat/v0/core/_data_model/models/entities/_restrictions.py,sha256=wjA6GpfL1FGIjsf6Mo6Rj9Ht-gRnBvOkhj_waN2w2p4,8943
- cognite/neat/v0/core/_data_model/models/entities/_single_value.py,sha256=
+ cognite/neat/v0/core/_data_model/models/entities/_single_value.py,sha256=hoBU7bfTcjnhkn83zW24JAVrargu_j-wy1NqceR18JA,24402
  cognite/neat/v0/core/_data_model/models/entities/_types.py,sha256=ZmtRvQf4Ghhf4hDnKbZeQq4tZyj5NcQngtw8GeZYa_k,2432
  cognite/neat/v0/core/_data_model/models/entities/_wrapped.py,sha256=hOvdyxCNFgv1UdfaasviKnbEN4yN09Iip0ggQiaXgB4,7993
  cognite/neat/v0/core/_data_model/models/mapping/__init__.py,sha256=T68Hf7rhiXa7b03h4RMwarAmkGnB-Bbhc1H07b2PyC4,100
@@ -123,12 +127,12 @@ cognite/neat/v0/core/_data_model/models/mapping/_classic2core.py,sha256=F0zusTh9
  cognite/neat/v0/core/_data_model/models/mapping/_classic2core.yaml,sha256=ei-nuivNWVW9HmvzDBKIPF6ZdgaMq64XHw_rKm0CMxg,22584
  cognite/neat/v0/core/_data_model/models/physical/__init__.py,sha256=pH5ZF8jiW0A2w7VCSoHUsXxe894QFvTtgjxXNGVVaxk,990
  cognite/neat/v0/core/_data_model/models/physical/_exporter.py,sha256=ega1Yyosq-D9XYCypx9wtuMTFwS0EMRo_e3JwW434XU,30335
- cognite/neat/v0/core/_data_model/models/physical/_unverified.py,sha256=
- cognite/neat/v0/core/_data_model/models/physical/_validation.py,sha256=
- cognite/neat/v0/core/_data_model/models/physical/_verified.py,sha256=
+ cognite/neat/v0/core/_data_model/models/physical/_unverified.py,sha256=eE2D2DPfqEfOnYaUmOTPhBpHp9oN-gxjn8CTe_GKtz0,22419
+ cognite/neat/v0/core/_data_model/models/physical/_validation.py,sha256=g0uSZImYaGmMZEwHUpb8KF9LTPiztz7k7cFQT3jrs4Y,41326
+ cognite/neat/v0/core/_data_model/models/physical/_verified.py,sha256=49dn3K_gWO3VgfS-SK4kHpbSJywtj4mwES3iy-43z4M,29824
  cognite/neat/v0/core/_data_model/transformers/__init__.py,sha256=N6yRBplAkrwwxoTAre_1BE_fdSZL5jihr7xTQjW3KnM,1876
  cognite/neat/v0/core/_data_model/transformers/_base.py,sha256=3ZO73A3xIAFUAyH4U682CbPUmZLPUB8HpWGpYvVbRBQ,3145
- cognite/neat/v0/core/_data_model/transformers/_converters.py,sha256=
+ cognite/neat/v0/core/_data_model/transformers/_converters.py,sha256=Wmd4Lc62Xu8o_HCp7QVA6EEubEWzjbGQH5MEI88fpFo,112040
  cognite/neat/v0/core/_data_model/transformers/_mapping.py,sha256=6R4QVblqivI1NvI0iSG5xC7fGHrdZcxqRg3c2Zx5o2E,19045
  cognite/neat/v0/core/_data_model/transformers/_union_conceptual.py,sha256=Dp8Oe6i2duihwDWnGnfPEXRExyKIMiFM_XEcr0U9IbE,8867
  cognite/neat/v0/core/_data_model/transformers/_verification.py,sha256=rIsmrmHvCgEddpPFIEXMMbRb9x9m3jCBi184g3JcrNA,5214
@@ -167,7 +171,7 @@ cognite/neat/v0/core/_instances/loaders/_rdf_to_instance_space.py,sha256=T1nNzhY
  cognite/neat/v0/core/_instances/queries/__init__.py,sha256=W477LMyB4l6HIRbQhJoFgA_MUBwVCh2GBvtFeZu0AUA,53
  cognite/neat/v0/core/_instances/queries/_base.py,sha256=APevHeeWQDEoOQ0MlBtVlPf9hbZukVkI5fOvt5oPJCE,543
  cognite/neat/v0/core/_instances/queries/_queries.py,sha256=4BidSQXhdZYZ6_kyG7jMJ2iG0UtSrbQxfmwPM7V167A,466
- cognite/neat/v0/core/_instances/queries/_select.py,sha256=
+ cognite/neat/v0/core/_instances/queries/_select.py,sha256=MoMCjRVeOhOPAJ6Tlc91ZSj8ibOIfEV5aoGg4kR1qS0,20068
  cognite/neat/v0/core/_instances/queries/_update.py,sha256=WJmh0hGoKT4pbbWeED6udFAXiv_qFPd3v9tnZLORcNk,1293
  cognite/neat/v0/core/_instances/transformers/__init__.py,sha256=YzC1Z8BuT77NwagWX4Z-F9R9BARLSS7zM4bCdxBbqKg,1761
  cognite/neat/v0/core/_instances/transformers/_base.py,sha256=a8TVhgYGdt7Mj5-omT6gxOHeGvYnMd9vJCty6p7ctx4,4707
@@ -195,7 +199,7 @@ cognite/neat/v0/core/_issues/warnings/_resources.py,sha256=L4iTuVYgfwcaCRTbTCVoo
  cognite/neat/v0/core/_issues/warnings/user_modeling.py,sha256=neM9IJzLGWFcBiuo5p5CLFglXjrUXR61FNqvupNw7Y0,4147
  cognite/neat/v0/core/_store/__init__.py,sha256=wpsF8xjIQ5V21NOh45XQV813n_EzgyPOt0VVinYjnDI,140
  cognite/neat/v0/core/_store/_data_model.py,sha256=09JlHEkJVEPHCju8ixRUUsvRcZb0UrDE7wevB7tq4PI,19682
- cognite/neat/v0/core/_store/_instance.py,sha256=
+ cognite/neat/v0/core/_store/_instance.py,sha256=zt7pgYwgMbnAwz7Xn-_AiMsXhbBdwXWBRKu2MrgE3jI,18924
  cognite/neat/v0/core/_store/_provenance.py,sha256=Q96wkVXRovO_uTlNvwCAOl6pAoWItTgFq1F79L_FqBk,7335
  cognite/neat/v0/core/_store/exceptions.py,sha256=dTaBSt7IV7XWtS3EsE8lBX1Dv3tfWX1nIEgGHkluy3s,1668
  cognite/neat/v0/core/_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -244,7 +248,7 @@ cognite/neat/v0/session/engine/__init__.py,sha256=D3MxUorEs6-NtgoICqtZ8PISQrjrr4
  cognite/neat/v0/session/engine/_import.py,sha256=1QxA2_EK613lXYAHKQbZyw2yjo5P9XuiX4Z6_6-WMNQ,169
  cognite/neat/v0/session/engine/_interface.py,sha256=3W-cYr493c_mW3P5O6MKN1xEQg3cA7NHR_ev3zdF9Vk,533
  cognite/neat/v0/session/engine/_load.py,sha256=u0x7vuQCRoNcPt25KJBJRn8sJabonYK4vtSZpiTdP4k,5201
- cognite_neat-0.
- cognite_neat-0.
- cognite_neat-0.
- cognite_neat-0.
+ cognite_neat-0.126.0.dist-info/METADATA,sha256=bD3aNMQGJUehi3xBg0fVg8pYW6mKPAwN1mXNIBIGiXw,9147
+ cognite_neat-0.126.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ cognite_neat-0.126.0.dist-info/licenses/LICENSE,sha256=W8VmvFia4WHa3Gqxq1Ygrq85McUNqIGDVgtdvzT-XqA,11351
+ cognite_neat-0.126.0.dist-info/RECORD,,
File without changes

File without changes
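The RECORD manifest above pairs every installed file with a `sha256=` digest and a byte size. The digest column is the URL-safe base64 encoding of the file's SHA-256 hash with the trailing padding stripped, which is why every 0-byte `__init__.py` shows the same value (47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU). A minimal sketch of that calculation follows; it uses only the Python standard library, and the helper name and example path are illustrative, not part of cognite-neat:

# Sketch (assumption-laden example): recompute a wheel RECORD line of the
# form "path,sha256=<digest>,<size>" so an installed file can be spot-checked
# against the listing above.
import base64
import hashlib
from pathlib import Path


def record_entry(file_path: Path, site_packages: Path) -> str:
    """Return a RECORD-style line for file_path relative to site_packages."""
    data = file_path.read_bytes()
    # RECORD digests are URL-safe base64 of the raw SHA-256 digest, no "=" padding.
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    relative = file_path.relative_to(site_packages).as_posix()
    return f"{relative},sha256={digest.decode('ascii')},{len(data)}"


# Hypothetical usage: compare the output with the corresponding RECORD line.
# site_packages = Path("/path/to/site-packages")
# print(record_entry(site_packages / "cognite/neat/_version.py", site_packages))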