cognite-neat 0.125.1__py3-none-any.whl → 0.126.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of cognite-neat might be problematic.
- cognite/neat/_client/__init__.py +4 -0
- cognite/neat/_client/api.py +8 -0
- cognite/neat/_client/client.py +19 -0
- cognite/neat/_client/config.py +40 -0
- cognite/neat/_client/containers_api.py +73 -0
- cognite/neat/_client/data_classes.py +10 -0
- cognite/neat/_client/data_model_api.py +63 -0
- cognite/neat/_client/spaces_api.py +67 -0
- cognite/neat/_client/views_api.py +82 -0
- cognite/neat/_data_model/_analysis.py +127 -0
- cognite/neat/_data_model/_constants.py +59 -0
- cognite/neat/_data_model/_shared.py +46 -0
- cognite/neat/_data_model/deployer/__init__.py +0 -0
- cognite/neat/_data_model/deployer/_differ.py +113 -0
- cognite/neat/_data_model/deployer/_differ_container.py +354 -0
- cognite/neat/_data_model/deployer/_differ_data_model.py +29 -0
- cognite/neat/_data_model/deployer/_differ_space.py +9 -0
- cognite/neat/_data_model/deployer/_differ_view.py +194 -0
- cognite/neat/_data_model/deployer/data_classes.py +176 -0
- cognite/neat/_data_model/exporters/__init__.py +4 -0
- cognite/neat/_data_model/exporters/_base.py +22 -0
- cognite/neat/_data_model/exporters/_table_exporter/__init__.py +0 -0
- cognite/neat/_data_model/exporters/_table_exporter/exporter.py +106 -0
- cognite/neat/_data_model/exporters/_table_exporter/workbook.py +414 -0
- cognite/neat/_data_model/exporters/_table_exporter/writer.py +391 -0
- cognite/neat/_data_model/importers/__init__.py +2 -1
- cognite/neat/_data_model/importers/_api_importer.py +88 -0
- cognite/neat/_data_model/importers/_table_importer/data_classes.py +48 -8
- cognite/neat/_data_model/importers/_table_importer/importer.py +102 -6
- cognite/neat/_data_model/importers/_table_importer/reader.py +860 -0
- cognite/neat/_data_model/models/dms/__init__.py +19 -1
- cognite/neat/_data_model/models/dms/_base.py +12 -8
- cognite/neat/_data_model/models/dms/_constants.py +1 -1
- cognite/neat/_data_model/models/dms/_constraints.py +2 -1
- cognite/neat/_data_model/models/dms/_container.py +5 -5
- cognite/neat/_data_model/models/dms/_data_model.py +3 -3
- cognite/neat/_data_model/models/dms/_data_types.py +8 -1
- cognite/neat/_data_model/models/dms/_http.py +18 -0
- cognite/neat/_data_model/models/dms/_indexes.py +2 -1
- cognite/neat/_data_model/models/dms/_references.py +17 -4
- cognite/neat/_data_model/models/dms/_space.py +11 -7
- cognite/neat/_data_model/models/dms/_view_property.py +7 -4
- cognite/neat/_data_model/models/dms/_views.py +16 -6
- cognite/neat/_data_model/validation/__init__.py +0 -0
- cognite/neat/_data_model/validation/_base.py +16 -0
- cognite/neat/_data_model/validation/dms/__init__.py +9 -0
- cognite/neat/_data_model/validation/dms/_orchestrator.py +68 -0
- cognite/neat/_data_model/validation/dms/_validators.py +139 -0
- cognite/neat/_exceptions.py +15 -3
- cognite/neat/_issues.py +39 -6
- cognite/neat/_session/__init__.py +3 -0
- cognite/neat/_session/_physical.py +88 -0
- cognite/neat/_session/_session.py +34 -25
- cognite/neat/_session/_wrappers.py +61 -0
- cognite/neat/_state_machine/__init__.py +10 -0
- cognite/neat/{_session/_state_machine → _state_machine}/_base.py +11 -1
- cognite/neat/_state_machine/_states.py +53 -0
- cognite/neat/_store/__init__.py +3 -0
- cognite/neat/_store/_provenance.py +55 -0
- cognite/neat/_store/_store.py +124 -0
- cognite/neat/_utils/_reader.py +194 -0
- cognite/neat/_utils/http_client/__init__.py +14 -20
- cognite/neat/_utils/http_client/_client.py +22 -61
- cognite/neat/_utils/http_client/_data_classes.py +167 -268
- cognite/neat/_utils/text.py +6 -0
- cognite/neat/_utils/useful_types.py +26 -2
- cognite/neat/_version.py +1 -1
- cognite/neat/v0/core/_data_model/importers/_rdf/_shared.py +2 -2
- cognite/neat/v0/core/_data_model/importers/_spreadsheet2data_model.py +2 -2
- cognite/neat/v0/core/_data_model/models/entities/_single_value.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_unverified.py +1 -1
- cognite/neat/v0/core/_data_model/models/physical/_validation.py +2 -2
- cognite/neat/v0/core/_data_model/models/physical/_verified.py +3 -3
- cognite/neat/v0/core/_data_model/transformers/_converters.py +1 -1
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/METADATA +1 -1
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/RECORD +78 -40
- cognite/neat/_session/_state_machine/__init__.py +0 -23
- cognite/neat/_session/_state_machine/_states.py +0 -150
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/WHEEL +0 -0
- {cognite_neat-0.125.1.dist-info → cognite_neat-0.126.1.dist-info}/licenses/LICENSE +0 -0
cognite/neat/_data_model/deployer/data_classes.py
@@ -0,0 +1,176 @@
+from abc import ABC, abstractmethod
+from datetime import datetime
+from enum import Enum
+from typing import Generic, Literal, TypeAlias
+
+from pydantic import BaseModel, Field
+from pydantic.alias_generators import to_camel
+
+from cognite.neat._data_model.models.dms import (
+    BaseModelObject,
+    ContainerReference,
+    ContainerRequest,
+    DataModelReference,
+    DataModelRequest,
+    NodeReference,
+    SpaceRequest,
+    T_Resource,
+    ViewReference,
+    ViewRequest,
+)
+from cognite.neat._utils.http_client._data_classes import HTTPMessage
+from cognite.neat._utils.useful_types import T_Reference
+
+JsonPath: TypeAlias = str  # e.g., 'properties.temperature', 'constraints.uniqueKey'
+DataModelEndpoint: TypeAlias = Literal["spaces", "containers", "views", "datamodels", "instances"]
+
+
+class SeverityType(Enum):
+    SAFE = 1
+    WARNING = 2
+    BREAKING = 3
+
+    @classmethod
+    def max_severity(cls, severities: list["SeverityType"], default: "SeverityType") -> "SeverityType":
+        value = max([severity.value for severity in severities], default=default.value)
+        return cls(value)
+
+
+class BaseDeployObject(BaseModel, alias_generator=to_camel, extra="ignore", populate_by_name=True):
+    """Base class for all deployer data model objects."""
+
+    ...
+
+
+class FieldChange(BaseDeployObject, ABC):
+    """Represents a change to a specific property or field."""
+
+    field_path: JsonPath
+
+    @property
+    @abstractmethod
+    def severity(self) -> SeverityType:
+        """The severity of the change."""
+        raise NotImplementedError()
+
+
+class PrimitiveField(FieldChange, ABC):
+    """Base class for changes to primitive properties."""
+
+    item_severity: SeverityType
+
+    @property
+    def severity(self) -> SeverityType:
+        return self.item_severity
+
+
+class AddedField(PrimitiveField):
+    new_value: BaseModelObject | str | int | float | bool | None
+
+    @property
+    def description(self) -> str:
+        return f"added with value {self.new_value!r}"
+
+
+class RemovedField(PrimitiveField):
+    current_value: BaseModelObject | str | int | float | bool | None
+
+    @property
+    def description(self) -> str:
+        return f"removed (was {self.current_value!r})"
+
+
+class ChangedField(PrimitiveField):
+    new_value: BaseModelObject | str | int | float | bool | None
+    current_value: BaseModelObject | str | int | float | bool | None
+
+    @property
+    def description(self) -> str:
+        if self.new_value is None:
+            return f"removed (was {self.current_value!r})"
+        elif self.current_value is None:
+            return f"added with value {self.new_value!r}"
+        return f"changed from {self.current_value!r} to {self.new_value!r}"
+
+
+class FieldChanges(FieldChange):
+    """Represents a nested property, i.e., a property that contains other properties."""
+
+    changes: list[FieldChange]
+
+    @property
+    def severity(self) -> SeverityType:
+        return SeverityType.max_severity([item.severity for item in self.changes], default=SeverityType.SAFE)
+
+
+class ResourceChange(BaseDeployObject, Generic[T_Reference, T_Resource]):
+    resource_id: T_Reference
+    new_value: T_Resource
+    old_value: T_Resource | None = None
+    changes: list[FieldChange] = Field(default_factory=list)
+
+    @property
+    def change_type(self) -> Literal["create", "update", "delete", "unchanged"]:
+        if self.old_value is None:
+            return "create"
+        elif self.changes:
+            return "update"
+        else:
+            return "unchanged"
+
+    @property
+    def severity(self) -> SeverityType:
+        return SeverityType.max_severity([change.severity for change in self.changes], default=SeverityType.SAFE)
+
+
+class ResourceDeploymentPlan(BaseDeployObject, Generic[T_Reference, T_Resource]):
+    endpoint: DataModelEndpoint
+    resources: list[ResourceChange[T_Reference, T_Resource]]
+
+    @property
+    def to_upsert(self) -> list[ResourceChange[T_Reference, T_Resource]]:
+        return [change for change in self.resources if change.change_type in ("create", "update")]
+
+    @property
+    def to_delete(self) -> list[ResourceChange[T_Reference, T_Resource]]:
+        return [change for change in self.resources if change.change_type == "delete"]
+
+    @property
+    def unchanged(self) -> list[ResourceChange[T_Reference, T_Resource]]:
+        return [change for change in self.resources if change.change_type == "unchanged"]
+
+
+class SchemaSnapshot(BaseDeployObject):
+    timestamp: datetime
+    data_model: dict[DataModelReference, DataModelRequest]
+    views: dict[ViewReference, ViewRequest]
+    containers: dict[ContainerReference, ContainerRequest]
+    spaces: dict[str, SpaceRequest]
+    node_types: dict[NodeReference, NodeReference]
+
+
+class ChangeResult(BaseDeployObject, Generic[T_Reference, T_Resource]):
+    change: ResourceChange[T_Reference, T_Resource]
+    message: HTTPMessage
+
+
+class AppliedChanges(BaseDeployObject, Generic[T_Reference, T_Resource]):
+    created: list[ChangeResult[T_Reference, T_Resource]] = Field(default_factory=list)
+    updated: list[ChangeResult[T_Reference, T_Resource]] = Field(default_factory=list)
+    deletions: list[ChangeResult[T_Reference, T_Resource]] = Field(default_factory=list)
+
+
+class DeploymentResult(BaseDeployObject):
+    status: Literal["success", "failure", "partial", "pending"]
+    plan: list[ResourceDeploymentPlan]
+    snapshot: SchemaSnapshot
+    responses: AppliedChanges | None = None
+    recovery: AppliedChanges | None = None
+
+    @property
+    def is_dry_run(self) -> bool:
+        return self.status == "pending"
+
+    @property
+    def is_success(self) -> bool:
+        return self.status == "success"
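Illustrative sketch (not from the package): how a nested field change rolls up to a severity via FieldChanges and SeverityType.max_severity. The module path and class names come from the diff above; the concrete field values are made up for the example.

# Hypothetical values, for illustration only.
from cognite.neat._data_model.deployer.data_classes import ChangedField, FieldChanges, SeverityType

nullable_change = ChangedField(
    field_path="properties.temperature.nullable",  # hypothetical JsonPath
    item_severity=SeverityType.BREAKING,
    current_value=True,
    new_value=False,
)
grouped = FieldChanges(field_path="properties.temperature", changes=[nullable_change])
assert grouped.severity is SeverityType.BREAKING  # max severity over the nested changes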
cognite/neat/_data_model/exporters/_base.py
@@ -0,0 +1,22 @@
+from abc import ABC, abstractmethod
+from pathlib import Path
+from typing import Generic, TypeVar
+
+from cognite.neat._data_model.models.dms import RequestSchema
+
+T_Export = TypeVar("T_Export")
+
+
+class DMSExporter(ABC, Generic[T_Export]):
+    """This is the base class for all DMS exporters."""
+
+    NEW_LINE = "\n"
+    ENCODING = "utf-8"
+
+    @abstractmethod
+    def _export(self, data_model: RequestSchema) -> T_Export:
+        raise NotImplementedError()
+
+    @abstractmethod
+    def export(self, data_model: RequestSchema, file_path: Path) -> None:
+        raise NotImplementedError()
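Illustrative sketch (not part of the package): a minimal concrete DMSExporter that writes the schema as JSON, showing the contract defined above. It assumes RequestSchema is a pydantic model, consistent with the other DMS models in this package; the class name DMSJsonExporter is hypothetical.

import json
from pathlib import Path

from cognite.neat._data_model.exporters._base import DMSExporter
from cognite.neat._data_model.models.dms import RequestSchema


class DMSJsonExporter(DMSExporter[str]):
    def _export(self, data_model: RequestSchema) -> str:
        # Assumes RequestSchema supports pydantic's model_dump.
        return json.dumps(data_model.model_dump(mode="json", by_alias=True), indent=2)

    def export(self, data_model: RequestSchema, file_path: Path) -> None:
        file_path.write_text(self._export(data_model), encoding=self.ENCODING, newline=self.NEW_LINE)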
cognite/neat/_data_model/exporters/_table_exporter/__init__.py
File without changes
cognite/neat/_data_model/exporters/_table_exporter/exporter.py
@@ -0,0 +1,106 @@
+from pathlib import Path
+from typing import cast
+
+import yaml
+from abc import ABC
+
+from cognite.neat._data_model.exporters._base import DMSExporter
+from cognite.neat._data_model.importers._table_importer.data_classes import DMSProperty, TableDMS
+from cognite.neat._data_model.models.dms import RequestSchema
+from cognite.neat._utils.useful_types import DataModelTableType
+
+from .workbook import WorkbookCreator, WorkbookOptions
+from .writer import DMSTableWriter
+
+
+class DMSTableExporter(DMSExporter[DataModelTableType], ABC):
+    """Exports DMS to a table structure.
+
+    The tables are expected to be a dictionary where the keys are the table names and the values
+    are lists of dictionaries representing the rows in the table.
+    """
+
+    class Sheets:
+        properties = cast(str, TableDMS.model_fields["properties"].validation_alias)
+
+    def __init__(self, exclude_none: bool = False) -> None:
+        self._exclude_none = exclude_none
+
+    def _export(self, data_model: RequestSchema) -> DataModelTableType:
+        model = data_model.data_model
+        tables = DMSTableWriter(model.space, model.version).write_tables(data_model)
+        exclude: set[str] = set()
+        if self._exclude_none:
+            if not tables.enum:
+                exclude.add("enum")
+            if not tables.nodes:
+                exclude.add("nodes")
+            if not tables.containers:
+                exclude.add("containers")
+
+        output = tables.model_dump(mode="json", by_alias=True, exclude_none=self._exclude_none, exclude=exclude)
+        # With exclude_none we only want to drop None for optional properties, not required ones.
+        # Thus, we re-add the required properties below.
+        required_properties = [
+            field_.serialization_alias for field_ in DMSProperty.model_fields.values() if field_.is_required()
+        ]
+        for row in output[self.Sheets.properties]:
+            for prop in required_properties:
+                if prop not in row:
+                    row[prop] = None
+        return output
+
+
+class DMSYamlExporter(DMSTableExporter):
+    """Exports DMS to YAML."""
+
+    def __init__(self) -> None:
+        super().__init__(exclude_none=True)
+
+    def export(self, data_model: RequestSchema, file_path: Path) -> None:
+        """Exports the data model as a flat YAML file, which is identical to the spreadsheet representation.
+
+        Args:
+            data_model (RequestSchema): The data model to export.
+            file_path (Path): The path to the YAML file to create.
+        """
+        table_format = self._export(data_model)
+        file_path.write_text(
+            yaml.safe_dump(table_format, sort_keys=False), encoding=self.ENCODING, newline=self.NEW_LINE
+        )
+
+
+class DMSExcelExporter(DMSTableExporter):
+    """Exports DMS to an Excel file."""
+
+    def __init__(self, options: WorkbookOptions | None = None) -> None:
+        super().__init__(exclude_none=False)
+        self._options = options or WorkbookOptions()
+
+    def export(self, data_model: RequestSchema, file_path: Path) -> None:
+        """Exports the data model as an Excel file.
+
+        Args:
+            data_model (RequestSchema): The data model to export.
+            file_path (Path): The path to the Excel file to create.
+            options (WorkbookOptions | None): Options for creating the workbook.
+        """
+        table_format = self._export(data_model)
+        workbook = WorkbookCreator(self._options).create_workbook(table_format)
+        try:
+            workbook.save(file_path)
+        finally:
+            workbook.close()
+
+
+class DMSCsvExporter(DMSTableExporter):
+    """Exports DMS to CSV files in a directory."""
+
+    def export(self, data_model: RequestSchema, directory_path: Path) -> None:
+        """Exports the data model as a set of CSV files, one for each table.
+
+        Args:
+            data_model (RequestSchema): The data model to export.
+            directory_path (Path): The path to the directory to create the CSV files in.
+        """
+        raise NotImplementedError()
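Usage sketch for the exporters above, assuming a RequestSchema instance `schema` has already been produced elsewhere (e.g. by one of the importers in cognite.neat._data_model.importers).

from pathlib import Path

from cognite.neat._data_model.exporters._table_exporter.exporter import DMSExcelExporter, DMSYamlExporter

# Flat YAML mirroring the spreadsheet layout (None values for optional columns omitted).
DMSYamlExporter().export(schema, Path("data_model.yaml"))
# Excel workbook with one sheet per table, using default WorkbookOptions.
DMSExcelExporter().export(schema, Path("data_model.xlsx"))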