cognite-neat 1.0.31__py3-none-any.whl → 1.0.33__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- cognite/neat/_client/data_classes.py +32 -0
- cognite/neat/_client/statistics_api.py +28 -1
- cognite/neat/_data_model/_analysis.py +3 -0
- cognite/neat/_data_model/_constants.py +4 -0
- cognite/neat/_data_model/deployer/_differ_container.py +1 -1
- cognite/neat/_data_model/deployer/data_classes.py +15 -16
- cognite/neat/_data_model/deployer/deployer.py +40 -13
- cognite/neat/_data_model/models/dms/_http.py +10 -1
- cognite/neat/_data_model/models/dms/_references.py +38 -4
- cognite/neat/_data_model/{validation/dms → rules}/_base.py +11 -5
- cognite/neat/_data_model/rules/cdf/__init__.py +3 -0
- cognite/neat/_data_model/rules/cdf/_base.py +5 -0
- cognite/neat/_data_model/rules/cdf/_orchestrator.py +56 -0
- cognite/neat/_data_model/rules/cdf/_spaces.py +47 -0
- cognite/neat/_data_model/{validation → rules}/dms/__init__.py +2 -2
- cognite/neat/_data_model/{validation → rules}/dms/_ai_readiness.py +17 -17
- cognite/neat/_data_model/rules/dms/_base.py +5 -0
- cognite/neat/_data_model/{validation → rules}/dms/_connections.py +23 -23
- cognite/neat/_data_model/{validation → rules}/dms/_consistency.py +3 -3
- cognite/neat/_data_model/{validation → rules}/dms/_containers.py +9 -9
- cognite/neat/_data_model/{validation → rules}/dms/_limits.py +14 -14
- cognite/neat/_data_model/{validation → rules}/dms/_orchestrator.py +7 -7
- cognite/neat/_data_model/{validation → rules}/dms/_performance.py +7 -7
- cognite/neat/_data_model/{validation → rules}/dms/_views.py +7 -7
- cognite/neat/_session/_cdf.py +15 -1
- cognite/neat/_session/_physical.py +6 -6
- cognite/neat/_session/_wrappers.py +1 -1
- cognite/neat/_state_machine/_states.py +1 -1
- cognite/neat/_store/_store.py +19 -1
- cognite/neat/_version.py +1 -1
- {cognite_neat-1.0.31.dist-info → cognite_neat-1.0.33.dist-info}/METADATA +1 -1
- {cognite_neat-1.0.31.dist-info → cognite_neat-1.0.33.dist-info}/RECORD +34 -29
- {cognite_neat-1.0.31.dist-info → cognite_neat-1.0.33.dist-info}/WHEEL +1 -1
- /cognite/neat/_data_model/{validation → rules}/__init__.py +0 -0
cognite/neat/_client/data_classes.py

@@ -2,6 +2,8 @@ from typing import Generic, TypeVar
 
 from pydantic import BaseModel, Field
 
+from cognite.neat._utils.useful_types import BaseModelObject
+
 T = TypeVar("T", bound=BaseModel)
 
 
@@ -42,3 +44,33 @@ class StatisticsResponse(BaseModel, populate_by_name=True):
     concurrent_read_limit: int = Field(alias="concurrentReadLimit")
     concurrent_write_limit: int = Field(alias="concurrentWriteLimit")
     concurrent_delete_limit: int = Field(alias="concurrentDeleteLimit")
+
+
+class SpaceStatisticsItem(BaseModelObject, populate_by_name=True):
+    """Individual space statistics item."""
+
+    space: str
+    containers: int
+    views: int
+    data_models: int
+    edges: int
+    soft_deleted_edges: int
+    nodes: int
+    soft_deleted_nodes: int
+
+    @property
+    def is_empty(self) -> bool:
+        """Check if the space has zero usage."""
+        return (
+            self.containers == 0 and self.views == 0 and self.data_models == 0 and self.edges == 0 and self.nodes == 0
+        )
+
+
+class SpaceStatisticsResponse(BaseModelObject, populate_by_name=True):
+    """Response model for space statistics endpoint."""
+
+    items: list[SpaceStatisticsItem]
+
+    def empty_spaces(self) -> list[str]:
+        """Get a list of space identifiers that have zero usage."""
+        return [item.space for item in self.items if item.is_empty]
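
The new statistics models are ordinary Pydantic models, so they can be exercised without a client. A minimal sketch; the payload below is invented for illustration:

```python
# Invented payload, purely to illustrate the new models.
from cognite.neat._client.data_classes import SpaceStatisticsResponse

payload = {
    "items": [
        {
            "space": "my_space",
            "containers": 0,
            "views": 0,
            "data_models": 0,
            "edges": 0,
            "soft_deleted_edges": 0,
            "nodes": 0,
            "soft_deleted_nodes": 0,
        }
    ]
}

stats = SpaceStatisticsResponse.model_validate(payload)
# is_empty looks only at the live resource counts, not the soft-deleted counters
assert stats.items[0].is_empty
print(stats.empty_spaces())  # ['my_space']
```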

cognite/neat/_client/statistics_api.py

@@ -1,7 +1,10 @@
+import json
+
 from cognite.neat._utils.http_client import HTTPClient, ParametersRequest
+from cognite.neat._utils.http_client._data_classes import SimpleBodyRequest
 
 from .config import NeatClientConfig
-from .data_classes import StatisticsResponse
+from .data_classes import SpaceStatisticsResponse, StatisticsResponse
 
 
 class StatisticsAPI:
@@ -27,3 +30,27 @@ class StatisticsAPI:
         result.raise_for_status()
         result = StatisticsResponse.model_validate_json(result.success_response.body)
         return result
+
+    def space_statistics(self, spaces: list[str]) -> SpaceStatisticsResponse:
+        """Retrieve space-wise usage data and limits.
+
+        Args:
+            spaces: List of space identifiers to retrieve statistics for.
+
+        Returns:
+            SpaceStatisticsResponse object.
+        """
+
+        body = {"items": [{"space": space} for space in spaces]}
+
+        result = self._http_client.request_with_retries(
+            SimpleBodyRequest(
+                endpoint_url=self._config.create_api_url("/models/statistics/spaces/byids"),
+                method="POST",
+                body=json.dumps(body),
+            )
+        )
+
+        result.raise_for_status()
+        result = SpaceStatisticsResponse.model_validate_json(result.success_response.body)
+        return result
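
Calling the new endpoint wrapper is a one-liner once a `StatisticsAPI` instance is available. A sketch under that assumption (how the instance and its HTTP client are constructed is not part of this diff):

```python
# Sketch: `statistics_api` is assumed to be an already-constructed StatisticsAPI.
response = statistics_api.space_statistics(["my_space", "another_space"])

for item in response.items:
    print(item.space, item.containers, item.views, item.data_models, item.nodes, item.edges)

# Spaces with zero usage, e.g. candidates for cleanup
print(response.empty_spaces())
```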

cognite/neat/_data_model/_analysis.py

@@ -8,6 +8,7 @@ from typing import Literal, TypeAlias, TypeVar
 import networkx as nx
 from pyparsing import cached_property
 
+from cognite.neat._client.data_classes import SpaceStatisticsResponse
 from cognite.neat._data_model._constants import COGNITE_SPACES
 from cognite.neat._data_model._snapshot import SchemaSnapshot
 from cognite.neat._data_model.models.dms._constraints import RequiresConstraintDefinition
@@ -70,9 +71,11 @@ class ValidationResources:
         local: SchemaSnapshot,
         cdf: SchemaSnapshot,
         limits: SchemaLimits | None = None,
+        space_statistics: SpaceStatisticsResponse | None = None,
     ) -> None:
         self._modus_operandi = modus_operandi
         self.limits = limits or SchemaLimits()
+        self.space_statistics = space_statistics
 
         self.local = local
         self.cdf = cdf

cognite/neat/_data_model/_constants.py

@@ -67,8 +67,12 @@ COGNITE_SPACES = (
     "cdf_apm",
     "cdf_apps_shared",
     "cdf_cdm_3d",
+    "cdf_time_series_data",
+    "cdf_cdm_units",
 )
 
+COGNITE_APP_SPACES = ("CommentInstanceSpace", "IndustrialCanvasInstanceSpace", "SolutionTagsInstanceSpace", "scene")
+
 # Defaults from https://docs.cognite.com/cdf/dm/dm_reference/dm_limits_and_restrictions#list-size-limits
 
 DEFAULT_MAX_LIST_SIZE = 1000

cognite/neat/_data_model/deployer/_differ_container.py

@@ -55,7 +55,7 @@ class ContainerDiffer(ItemDiffer[ContainerRequest]):
             "constraints",
             current.constraints,
             new.constraints,
-            add_severity=SeverityType.
+            add_severity=SeverityType.WARNING,
             remove_severity=SeverityType.WARNING,
             differ=ConstraintDiffer("constraints"),
         )

cognite/neat/_data_model/deployer/data_classes.py

@@ -12,7 +12,6 @@ from pydantic.alias_generators import to_camel
 from cognite.neat._data_model._snapshot import SchemaSnapshot
 from cognite.neat._data_model.models.dms import (
     BaseModelObject,
-    Constraint,
     ContainerConstraintReference,
     ContainerIndexReference,
     ContainerPropertyDefinition,
@@ -20,7 +19,6 @@ from cognite.neat._data_model.models.dms import (
     ContainerRequest,
     DataModelRequest,
     DataModelResource,
-    Index,
     T_DataModelResource,
     T_ResourceId,
     ViewRequest,
@@ -193,7 +191,9 @@ class ContainerDeploymentPlan(ResourceDeploymentPlan[ContainerReference, Contain
     def constraints_to_remove(self) -> dict[ContainerConstraintReference, RemovedField]:
         return self._get_fields_to_remove("constraints.", ContainerConstraintReference)
 
-    def _get_fields_to_remove(
+    def _get_fields_to_remove(
+        self, field_prefix: str, ref_cls: type[ContainerIndexReference] | type[ContainerConstraintReference]
+    ) -> dict:
         items: dict = {}
         for resource_change in self.resources:
             for change in resource_change.changes:
@@ -202,7 +202,7 @@ class ContainerDeploymentPlan(ResourceDeploymentPlan[ContainerReference, Contain
                     items[
                         ref_cls(
                             space=resource_change.resource_id.space,
-
+                            container_external_id=resource_change.resource_id.external_id,
                             identifier=identifier,
                         )
                     ] = change
@@ -254,7 +254,7 @@ class ResourceDeploymentPlanList(UserList[ResourceDeploymentPlan]):
                 updated_resource = resource.model_copy(update={"new_value": resource.current_value})
             elif resource.changes and resource.new_value is not None:
                 # Find all field removals and update new_value accordingly.
-                removals = [change for change in resource.changes if isinstance(change, RemovedField)]
+                removals: list[RemovedField] = [change for change in resource.changes if isinstance(change, RemovedField)]
                 addition_paths = {change.field_path for change in resource.changes if isinstance(change, AddedField)}
                 if removals:
                     if resource.current_value is None:
@@ -271,6 +271,13 @@ class ResourceDeploymentPlanList(UserList[ResourceDeploymentPlan]):
                             for change in resource.changes
                             if not isinstance(change, RemovedField)
                             or (isinstance(change, RemovedField) and change.field_path in addition_paths)
+                            or (
+                                isinstance(change, RemovedField)
+                                and (
+                                    change.field_path.startswith("constraints.")
+                                    or change.field_path.startswith("indexes.")
+                                )
+                            )
                         ],
                     }
                 )
@@ -320,22 +327,14 @@ class ResourceDeploymentPlanList(UserList[ResourceDeploymentPlan]):
         resource: ContainerRequest, removals: list[RemovedField], addition_paths: set[str]
     ) -> DataModelResource:
         container_properties = resource.properties.copy()
-        indexes = (resource.indexes or {}).copy()
-        constraints = (resource.constraints or {}).copy()
         for removal in removals:
             if removal.field_path.startswith("properties."):
                 prop_key = removal.field_path.removeprefix("properties.")
                 container_properties[prop_key] = cast(ContainerPropertyDefinition, removal.current_value)
-
-
-                index_key = removal.field_path.removeprefix("indexes.")
-                indexes[index_key] = cast(Index, removal.current_value)
-            elif removal.field_path.startswith("constraints.") and removal.field_path not in addition_paths:
-                # Constraint was removed and not re-added, so we need to restore it.
-                constraint_key = removal.field_path.removeprefix("constraints.")
-                constraints[constraint_key] = cast(Constraint, removal.current_value)
+        # Note: indexes and constraints are allowed to be removed in additive mode,
+        # so we don't restore them here unlike properties.
         return resource.model_copy(
-            update={"properties": container_properties
+            update={"properties": container_properties},
             deep=True,
         )
 

cognite/neat/_data_model/deployer/deployer.py

@@ -185,16 +185,20 @@ class SchemaDeployer(OnSuccessResultProducer):
                 continue
             current_resource = current_resources[ref]
             diffs = differ.diff(current_resource, new_resource)
-            if (
-
-
-                and self.options.modus_operandi == "additive"
-            ):
-                # In additive mode, changes to constraints and indexes require removal and re-adding
-                # In rebuild mode, all changes are forced via deletion and re-adding
+            if isinstance(current_resource, ContainerRequest) and isinstance(new_resource, ContainerRequest):
+                # CDF doesn't support in-place modification of constraints/indexes,
+                # so we transform changes to remove + add operations in both modes
                 diffs = self.remove_readd_modified_indexes_and_constraints(diffs, current_resource, new_resource)
+
+            warnings = self._generate_warnings_for_constraint_and_index_changes(diffs)
             resources.append(
-                ResourceChange(
+                ResourceChange(
+                    resource_id=ref,
+                    new_value=new_resource,
+                    current_value=current_resource,
+                    changes=diffs,
+                    message=" ".join(warnings) if warnings else None,
+                )
             )
 
         return plan_type(endpoint=endpoint, resources=resources)
@@ -222,19 +226,19 @@ class SchemaDeployer(OnSuccessResultProducer):
                 raise RuntimeError("Bug in Neat. Malformed field path for constraint/index change.")
             # Field type is either "constraints" or "indexes"
             field_type, identifier, *_ = diff.field_path.split(".", maxsplit=2)
-
+            field_path = f"{field_type}.{identifier}"
             modified_diffs.append(
                 RemovedField(
-                    field_path=
+                    field_path=field_path,
                     item_severity=SeverityType.WARNING,
                     current_value=getattr(current_resource, field_type)[identifier],
                 )
             )
-
+            add_severity = SeverityType.WARNING if field_type == "constraints" else SeverityType.SAFE
             modified_diffs.append(
                 AddedField(
-                    field_path=
-                    item_severity=
+                    field_path=field_path,
+                    item_severity=add_severity,
                     new_value=getattr(new_resource, field_type)[identifier],
                 )
             )
@@ -262,6 +266,29 @@ class SchemaDeployer(OnSuccessResultProducer):
         )
         return None
 
+    @classmethod
+    def _generate_warnings_for_constraint_and_index_changes(cls, diffs: list[FieldChange]) -> list[str]:
+        """Generate warning messages for constraint and index changes.
+
+        Args:
+            diffs: The list of field changes.
+
+        Returns:
+            A list of warning messages for field changes involving constraint and index changes.
+        """
+        warnings: list[str] = []
+        if any(isinstance(diff, AddedField) and diff.field_path.startswith("constraints.") for diff in diffs):
+            warnings.append(
+                "Adding constraints could cause ingestion failures if the data being ingested violates the constraint."
+            )
+        if any(
+            isinstance(diff, RemovedField)
+            and (diff.field_path.startswith("constraints.") or diff.field_path.startswith("indexes."))
+            for diff in diffs
+        ):
+            warnings.append("Removing constraints or indexes may affect query performance.")
+        return warnings
+
     def should_proceed_to_deploy(self, plan: Sequence[ResourceDeploymentPlan]) -> bool:
         max_severity_in_plan = SeverityType.max_severity(
             [change.severity for resource_plan in plan for change in resource_plan.resources],

cognite/neat/_data_model/models/dms/_http.py

@@ -6,6 +6,8 @@ from cognite.neat._utils.useful_types import ReferenceObject
 from ._container import ContainerRequest
 from ._data_model import DataModelRequest
 from ._references import (
+    ContainerConstraintReference,
+    ContainerIndexReference,
     ContainerReference,
     DataModelReference,
     SpaceReference,
@@ -18,7 +20,14 @@ DataModelResource: TypeAlias = SpaceRequest | DataModelRequest | ViewRequest | C
 
 T_DataModelResource = TypeVar("T_DataModelResource", bound=DataModelResource)
 
-ResourceId: TypeAlias =
+ResourceId: TypeAlias = (
+    SpaceReference
+    | DataModelReference
+    | ViewReference
+    | ContainerReference
+    | ContainerIndexReference
+    | ContainerConstraintReference
+)
 
 T_ResourceId = TypeVar("T_ResourceId", bound=ResourceId)
 

cognite/neat/_data_model/models/dms/_references.py

@@ -133,9 +133,43 @@ class ViewDirectReference(ReferenceObject):
         return f"{self.source!s}.{self.identifier}"
 
 
-class ContainerIndexReference(
-
+class ContainerIndexReference(ReferenceObject):
+    """Reference to a container index for deletion API."""
+
+    space: str = Field(
+        description="Id of the space hosting the container.",
+        min_length=1,
+        max_length=43,
+        pattern=SPACE_FORMAT_PATTERN,
+    )
+    container_external_id: str = Field(
+        description="External-id of the container.",
+        min_length=1,
+        max_length=255,
+        pattern=DM_EXTERNAL_ID_PATTERN,
+        alias="containerExternalId",
+    )
+    identifier: str = Field(
+        description="Identifier of the index.",
+    )
 
 
-class ContainerConstraintReference(
-
+class ContainerConstraintReference(ReferenceObject):
+    """Reference to a container constraint for deletion API."""
+
+    space: str = Field(
+        description="Id of the space hosting the container.",
+        min_length=1,
+        max_length=43,
+        pattern=SPACE_FORMAT_PATTERN,
+    )
+    container_external_id: str = Field(
+        description="External-id of the container.",
+        min_length=1,
+        max_length=255,
+        pattern=DM_EXTERNAL_ID_PATTERN,
+        alias="containerExternalId",
+    )
+    identifier: str = Field(
+        description="Identifier of the constraint.",
+    )
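
The new reference models carry a camelCase alias for the container external-id, so serializing with `by_alias=True` should produce the wire-format keys of the deletion API. A small sketch with invented identifiers:

```python
from cognite.neat._data_model.models.dms import ContainerConstraintReference

# Invented identifiers, purely illustrative. The alias is used as the keyword here,
# since whether population by field name is enabled depends on the ReferenceObject config.
ref = ContainerConstraintReference(
    space="my_space",
    containerExternalId="MyContainer",
    identifier="uniqueExternalId",
)

print(ref.model_dump(by_alias=True))  # camelCase keys, matching the deletion API wire format
```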

cognite/neat/_data_model/rules/_base.py (moved from cognite/neat/_data_model/validation/dms/_base.py)

@@ -2,15 +2,17 @@ from abc import ABC, abstractmethod
 from typing import ClassVar
 
 from cognite.neat._data_model._analysis import ValidationResources
+from cognite.neat._data_model.models.dms._schema import RequestSchema
 from cognite.neat._issues import ConsistencyError, Recommendation
 
 
-class
-    """
+class NeatRule(ABC):
+    """Rules for data model principles."""
 
     code: ClassVar[str]
     issue_type: ClassVar[type[ConsistencyError] | type[Recommendation]]
     alpha: ClassVar[bool] = False
+    fixable: ClassVar[bool] = False
 
     def __init__(
         self,
@@ -19,7 +21,11 @@ class DataModelValidator(ABC):
         self.validation_resources = validation_resources
 
     @abstractmethod
-    def
-        """Execute
-        # do something with data model
+    def validate(self) -> list[ConsistencyError] | list[Recommendation] | list[ConsistencyError | Recommendation]:
+        """Execute rule validation."""
         ...
+
+    def fix(self) -> RequestSchema:
+        """Fix the issues found by the validator producing a fixed object."""
+
+        raise NotImplementedError("This rule does not implement fix()")
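
With the validator base class renamed to `NeatRule` and split into `validate()` plus an optional `fix()`, a concrete rule only needs to set `code` and `issue_type` and implement `validate()`. A hypothetical rule sketched against the signatures above; the rule itself, its code, and its threshold are invented:

```python
from cognite.neat._data_model.rules._base import NeatRule
from cognite.neat._issues import Recommendation


class TooManySpaces(NeatRule):
    """Hypothetical rule, shown only to illustrate the NeatRule contract."""

    code = "NEAT-EXAMPLE-001"  # invented code
    issue_type = Recommendation
    # alpha and fixable keep their defaults (False): the rule always runs and has no fix().

    def validate(self) -> list[Recommendation]:
        stats = self.validation_resources.space_statistics
        if stats is None or len(stats.items) <= 90:  # invented threshold
            return []
        return [
            Recommendation(
                message=f"The project uses {len(stats.items)} spaces, close to the typical limit of 100.",
                code=self.code,
                fix="Review whether unused spaces can be removed.",
            )
        ]
```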

cognite/neat/_data_model/rules/cdf/_orchestrator.py (new file)

@@ -0,0 +1,56 @@
+from collections.abc import Callable
+
+from cognite.neat._client.data_classes import SpaceStatisticsResponse
+from cognite.neat._data_model._analysis import ValidationResources
+from cognite.neat._data_model._shared import OnSuccessIssuesChecker
+from cognite.neat._data_model._snapshot import SchemaSnapshot
+from cognite.neat._data_model.models.dms._limits import SchemaLimits
+from cognite.neat._utils.auxiliary import get_concrete_subclasses
+
+from ._base import CDFRule
+
+
+class CDFRulesOrchestrator(OnSuccessIssuesChecker):
+    """CDF rules orchestrator, used to execute CDF rules on an entire CDF snapshot."""
+
+    def __init__(
+        self,
+        limits: SchemaLimits,
+        space_statistics: SpaceStatisticsResponse,
+        can_run_validator: Callable[[str, type], bool] | None = None,
+        enable_alpha_validators: bool = False,
+    ) -> None:
+        super().__init__()
+        self._limits = limits
+        self._can_run_validator = can_run_validator or (lambda code, issue_type: True)  # type: ignore
+        self._has_run = False
+        self._enable_alpha_validators = enable_alpha_validators
+        self._space_statistics = space_statistics
+
+    def run(self, cdf_snapshot: SchemaSnapshot) -> None:
+        """Run quality assessment on the DMS data model."""
+
+        validation_resources = self._gather_validation_resources(cdf_snapshot)
+
+        # Initialize all validators
+        validators: list[CDFRule] = [validator(validation_resources) for validator in get_concrete_subclasses(CDFRule)]
+
+        # Run validators
+        for validator in validators:
+            if validator.alpha and not self._enable_alpha_validators:
+                continue
+            if self._can_run_validator(validator.code, validator.issue_type):
+                self._issues.extend(validator.validate())
+
+        self._has_run = True
+
+    def _gather_validation_resources(self, cdf_snapshot: SchemaSnapshot) -> ValidationResources:
+        # we do not want to modify the original request schema during validation
+
+        return ValidationResources(
+            cdf=cdf_snapshot,
+            local=cdf_snapshot,
+            limits=self._limits,
+            space_statistics=self._space_statistics,
+            modus_operandi="rebuild",
+        )
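
Putting the new pieces together, the orchestrator discovers every concrete `CDFRule` subclass and runs it against a CDF snapshot. A usage sketch; obtaining the snapshot, limits, and space statistics is outside this diff, so those objects are assumed to exist already:

```python
from cognite.neat._data_model.rules.cdf._orchestrator import CDFRulesOrchestrator

# cdf_snapshot, limits and space_stats are assumed to be a SchemaSnapshot,
# SchemaLimits and SpaceStatisticsResponse obtained elsewhere.
orchestrator = CDFRulesOrchestrator(
    limits=limits,
    space_statistics=space_stats,
    enable_alpha_validators=False,  # alpha rules are skipped unless explicitly enabled
)
orchestrator.run(cdf_snapshot)  # instantiates and runs every concrete CDFRule, e.g. EmptySpaces
```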

cognite/neat/_data_model/rules/cdf/_spaces.py (new file)

@@ -0,0 +1,47 @@
+from cognite.neat._data_model._constants import COGNITE_APP_SPACES, COGNITE_SPACES
+from cognite.neat._data_model.rules.cdf._base import CDFRule
+from cognite.neat._issues import Recommendation
+
+BASE_CODE = "NEAT-CDF-SPACES"
+
+
+class EmptySpaces(CDFRule):
+    """Rule that checks for empty spaces in CDF.
+
+    ## What it does
+    This rule checks if there are any empty spaces in CDF.
+
+    ## Why is this bad?
+    CDF projects typically have limits of 100 spaces, and having empty spaces can waste these valuable resources.
+    Also, empty spaces can lead to confusion and mismanagement of resources within the CDF environment.
+    They may indicate incomplete configurations or unused resources that could be cleaned up.
+
+    ## Example
+    A space `iamempty` with no associated resources such as Views, Containers or Data Models.
+
+
+    """
+
+    code = f"{BASE_CODE}-001"
+    issue_type = Recommendation
+
+    def validate(self) -> list[Recommendation]:
+        issues: list[Recommendation] = []
+
+        if not self.validation_resources.space_statistics:
+            return issues
+
+        empty_spaces = set(self.validation_resources.space_statistics.empty_spaces()) - set(
+            COGNITE_APP_SPACES + COGNITE_SPACES
+        )
+
+        for space in empty_spaces:
+            issues.append(
+                Recommendation(
+                    message=f"Space '{space}' is empty and has no associated resources.",
+                    code=self.code,
+                    fix="Consider removing the empty space to maintain a clean CDF environment.",
+                )
+            )
+
+        return issues

cognite/neat/_data_model/rules/dms/__init__.py (moved from cognite/neat/_data_model/validation/dms/__init__.py)

@@ -36,7 +36,7 @@ from ._limits import (
     ViewImplementsCountIsOutOfLimits,
     ViewPropertyCountIsOutOfLimits,
 )
-from ._orchestrator import
+from ._orchestrator import DmsDataModelRulesOrchestrator
 from ._performance import (
     MissingRequiresConstraint,
     SuboptimalRequiresConstraint,
@@ -53,7 +53,7 @@ __all__ = [
     "DataModelMissingDescription",
     "DataModelMissingName",
     "DataModelViewCountIsOutOfLimits",
-    "
+    "DmsDataModelRulesOrchestrator",
     "EnumerationMissingDescription",
     "EnumerationMissingName",
     "ExternalContainerDoesNotExist",