infrahub-server 1.3.0b6__py3-none-any.whl → 1.3.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/cli/db.py +7 -5
- infrahub/cli/upgrade.py +6 -1
- infrahub/core/attribute.py +5 -0
- infrahub/core/diff/calculator.py +4 -1
- infrahub/core/diff/coordinator.py +8 -1
- infrahub/core/diff/query/field_specifiers.py +1 -1
- infrahub/core/diff/query/merge.py +2 -2
- infrahub/core/diff/query_parser.py +23 -32
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m012_convert_account_generic.py +1 -1
- infrahub/core/migrations/graph/m023_deduplicate_cardinality_one_relationships.py +2 -2
- infrahub/core/migrations/graph/m029_duplicates_cleanup.py +2 -2
- infrahub/core/migrations/graph/m031_check_number_attributes.py +102 -0
- infrahub/core/migrations/query/attribute_rename.py +1 -1
- infrahub/core/node/__init__.py +5 -1
- infrahub/core/node/constraints/grouped_uniqueness.py +88 -132
- infrahub/core/query/delete.py +3 -3
- infrahub/core/schema/attribute_parameters.py +12 -5
- infrahub/core/schema/basenode_schema.py +107 -1
- infrahub/core/schema/schema_branch.py +17 -5
- infrahub/core/validators/attribute/min_max.py +7 -2
- infrahub/core/validators/uniqueness/model.py +17 -0
- infrahub/core/validators/uniqueness/query.py +212 -1
- infrahub/graphql/app.py +5 -1
- infrahub/graphql/mutations/main.py +18 -2
- infrahub/services/adapters/message_bus/nats.py +5 -1
- infrahub/services/scheduler.py +5 -1
- infrahub_sdk/node/__init__.py +2 -0
- infrahub_sdk/node/node.py +33 -2
- infrahub_sdk/node/related_node.py +7 -0
- {infrahub_server-1.3.0b6.dist-info → infrahub_server-1.3.2.dist-info}/METADATA +1 -1
- {infrahub_server-1.3.0b6.dist-info → infrahub_server-1.3.2.dist-info}/RECORD +36 -35
- {infrahub_server-1.3.0b6.dist-info → infrahub_server-1.3.2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.3.0b6.dist-info → infrahub_server-1.3.2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.3.0b6.dist-info → infrahub_server-1.3.2.dist-info}/entry_points.txt +0 -0
infrahub/core/node/constraints/grouped_uniqueness.py
CHANGED

@@ -1,26 +1,19 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING
 
 from infrahub.core import registry
 from infrahub.core.constants import NULL_VALUE
-from infrahub.core.schema import (
-    MainSchemaTypes,
-    SchemaAttributePath,
-    SchemaAttributePathValue,
-)
 from infrahub.core.schema.basenode_schema import (
-    SchemaUniquenessConstraintPath,
     UniquenessConstraintType,
     UniquenessConstraintViolation,
 )
-from infrahub.core.validators.uniqueness.index import UniquenessQueryResultsIndex
 from infrahub.core.validators.uniqueness.model import (
-
-
-
+    NodeUniquenessQueryRequestValued,
+    QueryAttributePathValued,
+    QueryRelationshipPathValued,
 )
-from infrahub.core.validators.uniqueness.query import
+from infrahub.core.validators.uniqueness.query import UniquenessValidationQuery
 from infrahub.exceptions import HFIDViolatedError, ValidationError
 
 from .interface import NodeConstraintInterface
@@ -28,8 +21,11 @@ from .interface import NodeConstraintInterface
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
-    from infrahub.core.query import QueryResult
     from infrahub.core.relationship.model import RelationshipManager
+    from infrahub.core.schema import (
+        MainSchemaTypes,
+        SchemaAttributePath,
+    )
     from infrahub.core.timestamp import Timestamp
     from infrahub.database import InfrahubDatabase
 
@@ -40,72 +36,38 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
         self.branch = branch
         self.schema_branch = registry.schema.get_schema_branch(branch.name)
 
-    async def
-        self,
-        updated_node: Node,
-        node_schema: MainSchemaTypes,
-        uniqueness_constraint_paths: list[SchemaUniquenessConstraintPath],
-        filters: list[str] | None = None,
-    ) -> NodeUniquenessQueryRequest:
-        query_request = NodeUniquenessQueryRequest(kind=node_schema.kind)
-        for uniqueness_constraint_path in uniqueness_constraint_paths:
-            include_in_query = not filters
-            query_relationship_paths: set[QueryRelationshipAttributePath] = set()
-            query_attribute_paths: set[QueryAttributePath] = set()
-            for attribute_path in uniqueness_constraint_path.attributes_paths:
-                if attribute_path.related_schema and attribute_path.relationship_schema:
-                    if filters and attribute_path.relationship_schema.name in filters:
-                        include_in_query = True
-
-                    relationship_manager: RelationshipManager = getattr(
-                        updated_node, attribute_path.relationship_schema.name
-                    )
-                    related_node = await relationship_manager.get_peer(db=self.db)
-                    related_node_id = related_node.get_id() if related_node else None
-                    query_relationship_paths.add(
-                        QueryRelationshipAttributePath(
-                            identifier=attribute_path.relationship_schema.get_identifier(),
-                            value=related_node_id,
-                        )
-                    )
-                    continue
-                if attribute_path.attribute_schema:
-                    if filters and attribute_path.attribute_schema.name in filters:
-                        include_in_query = True
-                    attribute_name = attribute_path.attribute_schema.name
-                    attribute = getattr(updated_node, attribute_name)
-                    if attribute.is_enum and attribute.value:
-                        attribute_value = attribute.value.value
-                    else:
-                        attribute_value = attribute.value
-                    if attribute_value is None:
-                        attribute_value = NULL_VALUE
-                    query_attribute_paths.add(
-                        QueryAttributePath(
-                            attribute_name=attribute_name,
-                            property_name=attribute_path.attribute_property_name or "value",
-                            value=attribute_value,
-                        )
-                    )
-            if include_in_query:
-                query_request.relationship_attribute_paths |= query_relationship_paths
-                query_request.unique_attribute_paths |= query_attribute_paths
-        return query_request
-
-    async def _get_node_attribute_path_values(
+    async def _get_unique_valued_paths(
         self,
         updated_node: Node,
         path_group: list[SchemaAttributePath],
-
-
+        filters: list[str],
+    ) -> list[QueryAttributePathValued | QueryRelationshipPathValued]:
+        # if filters are provided, we need to check if the path group is relevant to the filters
+        if filters:
+            field_names: list[str] = []
+            for schema_attribute_path in path_group:
+                if schema_attribute_path.relationship_schema:
+                    field_names.append(schema_attribute_path.relationship_schema.name)
+                elif schema_attribute_path.attribute_schema:
+                    field_names.append(schema_attribute_path.attribute_schema.name)
+
+            if not set(field_names) & set(filters):
+                return []
+
+        valued_paths: list[QueryAttributePathValued | QueryRelationshipPathValued] = []
         for schema_attribute_path in path_group:
             if schema_attribute_path.relationship_schema:
                 relationship_name = schema_attribute_path.relationship_schema.name
                 relationship_manager: RelationshipManager = getattr(updated_node, relationship_name)
                 related_node = await relationship_manager.get_peer(db=self.db)
                 related_node_id = related_node.get_id() if related_node else None
-
-
+                valued_paths.append(
+                    QueryRelationshipPathValued(
+                        relationship_schema=schema_attribute_path.relationship_schema,
+                        peer_id=related_node_id,
+                        attribute_name=None,
+                        attribute_value=None,
+                    )
                 )
             elif schema_attribute_path.attribute_schema:
                 attribute_name = schema_attribute_path.attribute_schema.name
@@ -115,86 +77,79 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
                     attribute_value = attribute_value.value
                 elif attribute_value is None:
                     attribute_value = NULL_VALUE
-
-
-
+                valued_paths.append(
+                    QueryAttributePathValued(
+                        attribute_name=attribute_name,
                         value=attribute_value,
                     )
                 )
-        return
+        return valued_paths
 
-    async def
+    async def _get_single_schema_violations(
         self,
-
-
-
+        node: Node,
+        node_schema: MainSchemaTypes,
+        filters: list[str],
+        at: Timestamp | None = None,
     ) -> list[UniquenessConstraintViolation]:
-
-
+        schema_branch = self.db.schema.get_schema_branch(name=self.branch.name)
+
+        uniqueness_constraint_paths = node_schema.get_unique_constraint_schema_attribute_paths(
+            schema_branch=schema_branch
         )
-
+
+        violations: list[UniquenessConstraintViolation] = []
         for uniqueness_constraint_path in uniqueness_constraint_paths:
-
-
-
+            valued_paths = await self._get_unique_valued_paths(
+                updated_node=node,
+                path_group=uniqueness_constraint_path.attributes_paths,
+                filters=filters,
            )
 
-
-            if any(sapv.value is None for sapv in schema_attribute_path_values):
+            if not valued_paths:
                continue
 
-
-
+            # Create the valued query request for this constraint
+            valued_query_request = NodeUniquenessQueryRequestValued(
+                kind=node_schema.kind,
+                unique_valued_paths=valued_paths,
+            )
+
+            # Execute the query
+            query = await UniquenessValidationQuery.init(
+                db=self.db,
+                branch=self.branch,
+                at=at,
+                query_request=valued_query_request,
+                node_ids_to_exclude=[node.get_id()],
+            )
+            await query.execute(db=self.db)
+
+            # Get violation nodes from the query results
+            violation_nodes = query.get_violation_nodes()
+            if not violation_nodes:
                continue
 
+            # Create violation object
             uniqueness_constraint_fields = []
-            for
-                if
-                    uniqueness_constraint_fields.append(
-                elif
-                    uniqueness_constraint_fields.append(
-
-
-
-
-
-
+            for valued_path in valued_paths:
+                if isinstance(valued_path, QueryRelationshipPathValued):
+                    uniqueness_constraint_fields.append(valued_path.relationship_schema.name)
+                elif isinstance(valued_path, QueryAttributePathValued):
+                    uniqueness_constraint_fields.append(valued_path.attribute_name)
+
+            matching_node_ids = {node_id for node_id, _ in violation_nodes}
+            if matching_node_ids:
+                violations.append(
+                    UniquenessConstraintViolation(
+                        nodes_ids=matching_node_ids,
+                        fields=uniqueness_constraint_fields,
+                        typ=uniqueness_constraint_path.typ,
+                    )
                )
-            )
 
         return violations
 
-    async def _get_single_schema_violations(
-        self,
-        node: Node,
-        node_schema: MainSchemaTypes,
-        at: Timestamp | None = None,
-        filters: list[str] | None = None,
-    ) -> list[UniquenessConstraintViolation]:
-        schema_branch = self.db.schema.get_schema_branch(name=self.branch.name)
-
-        uniqueness_constraint_paths = node_schema.get_unique_constraint_schema_attribute_paths(
-            schema_branch=schema_branch
-        )
-        query_request = await self._build_query_request(
-            updated_node=node,
-            node_schema=node_schema,
-            uniqueness_constraint_paths=uniqueness_constraint_paths,
-            filters=filters,
-        )
-        if not query_request:
-            return []
-
-        query = await NodeUniqueAttributeConstraintQuery.init(
-            db=self.db, branch=self.branch, at=at, query_request=query_request, min_count_required=0
-        )
-        await query.execute(db=self.db)
-        return await self._get_violations(
-            updated_node=node,
-            uniqueness_constraint_paths=uniqueness_constraint_paths,
-            query_results=query.get_results(),
-        )
-
     async def check(self, node: Node, at: Timestamp | None = None, filters: list[str] | None = None) -> None:
         def _frozen_constraints(schema: MainSchemaTypes) -> frozenset[frozenset[str]]:
             if not schema.uniqueness_constraints:
@@ -218,7 +173,8 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
         if include_node_schema:
             schemas_to_check.append(node_schema)
 
-        violations = []
+        violations: list[UniquenessConstraintViolation] = []
+
         for schema in schemas_to_check:
             schema_filters = list(filters) if filters is not None else []
             for attr_schema in schema.attributes:
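
Note (illustrative, not part of the diff): the refactored constraint gates each uniqueness-constraint path group on the requested filters before resolving values. A minimal, standalone restatement of that filter check, with hypothetical field names:

    # Standalone restatement of the filter short-circuit added in _get_unique_valued_paths();
    # the field names and filters below are made-up examples.
    def path_group_matches_filters(field_names: list[str], filters: list[str]) -> bool:
        # A path group is only evaluated when at least one of its fields was requested.
        return bool(set(field_names) & set(filters))

    assert path_group_matches_filters(["name", "device"], filters=["name"])
    assert not path_group_matches_filters(["description"], filters=["name"])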
infrahub/core/query/delete.py
CHANGED

@@ -21,7 +21,7 @@ class DeleteAfterTimeQuery(Query):
        // ---------------------
        // Reset edges with to time after timestamp
        // ---------------------
-       CALL {
+       CALL () {
        OPTIONAL MATCH (p)-[r]-(q)
        WHERE r.to > $timestamp
        SET r.to = NULL
@@ -33,7 +33,7 @@ class DeleteAfterTimeQuery(Query):
        // ---------------------
        // Delete edges with from time after timestamp timestamp
        // ---------------------
-       CALL {
+       CALL () {
        OPTIONAL MATCH (p)-[r]->(q)
        WHERE r.from > $timestamp
        DELETE r
@@ -49,7 +49,7 @@ class DeleteAfterTimeQuery(Query):
        // ---------------------
        // Delete edges with from time after timestamp timestamp
        // ---------------------
-       CALL {
+       CALL () {
        OPTIONAL MATCH (p)-[r]->(q)
        WHERE r.from > $timestamp
        DELETE r
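
Note (assumption, not stated in the diff): `CALL () { ... }` is the explicit variable-scope form of Cypher subqueries in recent Neo4j releases; an empty scope clause imports no outer variables, which newer Neo4j versions prefer over the bare `CALL { ... }` form. A minimal sketch of the updated pattern as it might appear inside a Python query string (the exact surrounding query text is not shown in the diff):

    # Sketch only; query text and variable names follow the hunk above.
    reset_edges_subquery = """
    CALL () {
        OPTIONAL MATCH (p)-[r]-(q)
        WHERE r.to > $timestamp
        SET r.to = NULL
    }
    """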
infrahub/core/schema/attribute_parameters.py
CHANGED

@@ -8,6 +8,7 @@ from pydantic import ConfigDict, Field, model_validator
 from infrahub import config
 from infrahub.core.constants.schema import UpdateSupport
 from infrahub.core.models import HashableModel
+from infrahub.exceptions import ValidationError
 
 
 def get_attribute_parameters_class_for_kind(kind: str) -> type[AttributeParameters]:
@@ -124,16 +125,22 @@ class NumberAttributeParameters(AttributeParameters):
         return ranges
 
     def is_valid_value(self, value: int) -> bool:
-
+        try:
+            self.check_valid_value(value=value, name="UNUSED")
+        except ValidationError:
            return False
+        return True
+
+    def check_valid_value(self, value: int, name: str) -> None:
+        if self.min_value is not None and value < self.min_value:
+            raise ValidationError({name: f"{value} is lower than the minimum allowed value {self.min_value!r}"})
         if self.max_value is not None and value > self.max_value:
-
+            raise ValidationError({name: f"{value} is higher than the maximum allowed value {self.max_value!r}"})
         if value in self.get_excluded_single_values():
-
+            raise ValidationError({name: f"{value} is in the excluded values"})
         for start, end in self.get_excluded_ranges():
             if start <= value <= end:
-
-            return True
+                raise ValidationError({name: f"{value} is in an the excluded range {start}-{end}"})
 
 
 class NumberPoolParameters(AttributeParameters):
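
Note (illustrative, with hypothetical values): after this change `is_valid_value()` delegates to the new `check_valid_value()`, which raises `ValidationError` keyed by the attribute name. A usage sketch, assuming the parameters object accepts `min_value`/`max_value` keyword arguments as suggested by the fields referenced in the diff:

    from infrahub.core.schema.attribute_parameters import NumberAttributeParameters
    from infrahub.exceptions import ValidationError

    params = NumberAttributeParameters(min_value=1, max_value=4094)  # assumed constructor kwargs

    assert params.is_valid_value(100) is True
    assert params.is_valid_value(5000) is False

    try:
        params.check_valid_value(value=5000, name="vlan_id")  # "vlan_id" is a made-up field name
    except ValidationError as exc:
        print(exc)  # message mentions the maximum allowed value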
infrahub/core/schema/basenode_schema.py
CHANGED

@@ -3,6 +3,7 @@ from __future__ import annotations
 import hashlib
 import keyword
 import os
+from collections import defaultdict
 from dataclasses import asdict, dataclass
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal, overload
@@ -10,7 +11,7 @@ from typing import TYPE_CHECKING, Any, Callable, Iterable, Literal, overload
 from infrahub_sdk.utils import compare_lists, intersection
 from pydantic import field_validator
 
-from infrahub.core.constants import RelationshipCardinality, RelationshipKind
+from infrahub.core.constants import HashableModelState, RelationshipCardinality, RelationshipKind
 from infrahub.core.models import HashableModel, HashableModelDiff
 
 from .attribute_schema import AttributeSchema, get_attribute_schema_class_for_kind
@@ -514,7 +515,86 @@ class BaseNodeSchema(GeneratedBaseNodeSchema):
             return UniquenessConstraintType.SUBSET_OF_HFID
         return UniquenessConstraintType.STANDARD
 
+    def _update_schema_paths(
+        self, schema_paths_list: list[str], field_name_update_map: dict[str, str], deleted_field_names: set[str]
+    ) -> list[str]:
+        """
+        For each schema_path (eg name__value, device__name_value), update the field name if the current name is
+        in field_name_update_map, remove the path if the field name is in deleted_field_names
+        """
+        updated_element_list = []
+        for schema_path in schema_paths_list:
+            split_path = schema_path.split("__", maxsplit=1)
+            current_field_name = split_path[0]
+            if current_field_name in deleted_field_names:
+                continue
+            new_field_name = field_name_update_map.get(current_field_name)
+            if not new_field_name:
+                updated_element_list.append(schema_path)
+                continue
+            rest_of_path = f"__{split_path[1]}" if len(split_path) > 1 else ""
+            new_element_str = f"{new_field_name}{rest_of_path}"
+            updated_element_list.append(new_element_str)
+        return updated_element_list
+
+    def handle_field_renames_and_deletes(self, other: BaseNodeSchema) -> None:
+        properties_to_update = [self.uniqueness_constraints, self.human_friendly_id, self.display_labels, self.order_by]
+        if not any(p for p in properties_to_update):
+            return
+
+        deleted_names: set[str] = set()
+        field_names_by_id = defaultdict(list)
+        for field in self.attributes + self.relationships:
+            if not field.id:
+                continue
+            field_names_by_id[field.id].append(field.name)
+        for field in other.attributes + other.relationships:
+            # identify fields deleted in the other schema
+            if field.state is HashableModelState.ABSENT:
+                deleted_names.add(field.name)
+            if not field.id:
+                continue
+            if field.name not in field_names_by_id[field.id]:
+                field_names_by_id[field.id].append(field.name)
+        # identify fields renamed from this schema to the other schema
+        renamed_field_name_map = {v[0]: v[-1] for v in field_names_by_id.values() if len(v) > 1}
+
+        if self.uniqueness_constraints:
+            updated_constraints = []
+            for constraint in self.uniqueness_constraints:
+                updated_constraint = self._update_schema_paths(
+                    schema_paths_list=constraint,
+                    field_name_update_map=renamed_field_name_map,
+                    deleted_field_names=deleted_names,
+                )
+                if updated_constraint:
+                    updated_constraints.append(updated_constraint)
+            self.uniqueness_constraints = updated_constraints
+        if self.human_friendly_id:
+            self.human_friendly_id = self._update_schema_paths(
+                schema_paths_list=self.human_friendly_id,
+                field_name_update_map=renamed_field_name_map,
+                deleted_field_names=deleted_names,
+            )
+        if self.display_labels:
+            self.display_labels = self._update_schema_paths(
+                schema_paths_list=self.display_labels,
+                field_name_update_map=renamed_field_name_map,
+                deleted_field_names=deleted_names,
+            )
+        if self.order_by:
+            self.order_by = self._update_schema_paths(
+                schema_paths_list=self.order_by,
+                field_name_update_map=renamed_field_name_map,
+                deleted_field_names=deleted_names,
+            )
+
     def update(self, other: HashableModel) -> Self:
+        # handle renamed/deleted field updates for schema properties here
+        # so that they can still be overridden during the call to `update()` below
+        if isinstance(other, BaseNodeSchema):
+            self.handle_field_renames_and_deletes(other=other)
+
         super().update(other=other)
 
         # Allow to specify empty string to remove existing fields values
@@ -551,6 +631,24 @@ class SchemaAttributePath:
     attribute_schema: AttributeSchema | None = None
     attribute_property_name: str | None = None
 
+    def __str__(self) -> str:
+        return self.to_string()
+
+    def to_string(self, field_name_override: str | None = None) -> str:
+        str_path = ""
+        if self.relationship_schema:
+            str_path += field_name_override or self.relationship_schema.name
+        if self.attribute_schema:
+            if str_path:
+                str_path += "__"
+                attr_name = self.attribute_schema.name
+            else:
+                attr_name = field_name_override or self.attribute_schema.name
+            str_path += attr_name
+        if self.attribute_property_name:
+            str_path += f"__{self.attribute_property_name}"
+        return str_path
+
     @property
     def is_type_attribute(self) -> bool:
         return bool(self.attribute_schema and not self.related_schema and not self.relationship_schema)
@@ -563,6 +661,14 @@ class SchemaAttributePath:
     def has_property(self) -> bool:
         return bool(self.attribute_property_name)
 
+    @property
+    def field_name(self) -> str | None:
+        if self.relationship_schema:
+            return self.relationship_schema.name
+        if self.attribute_schema:
+            return self.attribute_schema.name
+        return None
+
     @property
     def active_relationship_schema(self) -> RelationshipSchema:
         if self.relationship_schema:
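
Note (illustrative): the new `_update_schema_paths()` helper rewrites `field__property` style paths in `uniqueness_constraints`, `human_friendly_id`, `display_labels`, and `order_by` when a field is renamed, and drops paths whose field was deleted. A standalone restatement of that logic with hypothetical inputs (not the package's API):

    def update_schema_paths(paths: list[str], renamed: dict[str, str], deleted: set[str]) -> list[str]:
        updated: list[str] = []
        for schema_path in paths:
            head, _, rest = schema_path.partition("__")
            if head in deleted:
                continue  # drop paths that reference a deleted field
            new_head = renamed.get(head, head)  # rewrite paths that reference a renamed field
            updated.append(f"{new_head}__{rest}" if rest else new_head)
        return updated

    assert update_schema_paths(
        ["name__value", "device__name__value", "legacy_field__value"],
        renamed={"name": "hostname"},
        deleted={"legacy_field"},
    ) == ["hostname__value", "device__name__value"]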
infrahub/core/schema/schema_branch.py
CHANGED

@@ -710,7 +710,9 @@ class SchemaBranch:
        ):
            unique_attrs_in_constraints.add(schema_attribute_path.attribute_schema.name)
 
-        unique_attrs_in_attrs = {
+        unique_attrs_in_attrs = {
+            attr_schema.name for attr_schema in node_schema.unique_attributes if not attr_schema.inherited
+        }
        if unique_attrs_in_attrs == unique_attrs_in_constraints:
            continue
 
@@ -822,11 +824,16 @@ class SchemaBranch:
        ) from exc
 
    def _is_attr_combination_unique(
-        self, attrs_paths: list[str], uniqueness_constraints: list[list[str]] | None
+        self, attrs_paths: list[str], uniqueness_constraints: list[list[str]] | None, unique_attribute_names: list[str]
    ) -> bool:
        """
-        Return whether at least one combination of any length of `attrs_paths` is
+        Return whether at least one combination of any length of `attrs_paths` is unique
        """
+        if unique_attribute_names:
+            for attr_path in attrs_paths:
+                for unique_attr_name in unique_attribute_names:
+                    if attr_path.startswith(unique_attr_name):
+                        return True
 
        if not uniqueness_constraints:
            return False
@@ -868,9 +875,14 @@ class SchemaBranch:
        if config.SETTINGS.main.schema_strict_mode:
            # For every relationship referred within hfid, check whether the combination of attributes is unique is the peer schema node
            for related_schema, attrs_paths in rel_schemas_to_paths.values():
-                if not self._is_attr_combination_unique(
+                if not self._is_attr_combination_unique(
+                    attrs_paths=attrs_paths,
+                    uniqueness_constraints=related_schema.uniqueness_constraints,
+                    unique_attribute_names=[a.name for a in related_schema.unique_attributes],
+                ):
                    raise ValidationError(
-                        f"HFID of {node_schema.kind} refers peer {related_schema.kind}
+                        f"HFID of {node_schema.kind} refers to peer {related_schema.kind}"
+                        f" with a non-unique combination of attributes {attrs_paths}"
                    )
 
    def validate_required_relationships(self) -> None:
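
Note (illustrative): `_is_attr_combination_unique()` now also accepts a combination when any referenced path starts with the name of an attribute that is itself unique on the peer schema, before falling back to the uniqueness-constraint check. A standalone restatement of the new short-circuit only (the `uniqueness_constraints` fallback is omitted), with hypothetical inputs:

    def has_unique_attribute_prefix(attrs_paths: list[str], unique_attribute_names: list[str]) -> bool:
        return any(
            attr_path.startswith(unique_attr_name)
            for attr_path in attrs_paths
            for unique_attr_name in unique_attribute_names
        )

    assert has_unique_attribute_prefix(["serial__value"], unique_attribute_names=["serial"])
    assert not has_unique_attribute_prefix(["name__value"], unique_attribute_names=["serial"])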
infrahub/core/validators/attribute/min_max.py
CHANGED

@@ -2,6 +2,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
+from infrahub import config
 from infrahub.core.constants import PathType
 from infrahub.core.path import DataPath, GroupedDataPaths
 from infrahub.core.schema.attribute_parameters import NumberAttributeParameters
@@ -87,6 +88,10 @@ class AttributeNumberChecker(ConstraintCheckerInterface):
        return "attribute.number.update"
 
    def supports(self, request: SchemaConstraintValidatorRequest) -> bool:
+        # Some invalid values may exist due to https://github.com/opsmill/infrahub/issues/6714.
+        if not config.SETTINGS.main.schema_strict_mode:
+            return False
+
        return request.constraint_name in (
            ConstraintIdentifier.ATTRIBUTE_PARAMETERS_MIN_VALUE_UPDATE.value,
            ConstraintIdentifier.ATTRIBUTE_PARAMETERS_MAX_VALUE_UPDATE.value,
@@ -94,7 +99,6 @@ class AttributeNumberChecker(ConstraintCheckerInterface):
        )
 
    async def check(self, request: SchemaConstraintValidatorRequest) -> list[GroupedDataPaths]:
-        grouped_data_paths_list: list[GroupedDataPaths] = []
        if not request.schema_path.field_name:
            raise ValueError("field_name is not defined")
        attribute_schema = request.node_schema.get_attribute(name=request.schema_path.field_name)
@@ -106,8 +110,9 @@ class AttributeNumberChecker(ConstraintCheckerInterface):
            and attribute_schema.parameters.max_value is None
            and attribute_schema.parameters.excluded_values is None
        ):
-            return
+            return []
 
+        grouped_data_paths_list: list[GroupedDataPaths] = []
        for query_class in self.query_classes:
            # TODO add exception handling
            query = await query_class.init(
infrahub/core/validators/uniqueness/model.py
CHANGED

@@ -59,6 +59,23 @@ class NodeUniquenessQueryRequest(BaseModel):
    )
 
 
+class QueryRelationshipPathValued(BaseModel):
+    relationship_schema: RelationshipSchema
+    peer_id: str | None
+    attribute_name: str | None
+    attribute_value: str | bool | int | float | None
+
+
+class QueryAttributePathValued(BaseModel):
+    attribute_name: str
+    value: str | bool | int | float
+
+
+class NodeUniquenessQueryRequestValued(BaseModel):
+    kind: str
+    unique_valued_paths: list[QueryAttributePathValued | QueryRelationshipPathValued]
+
+
 class NonUniqueRelatedAttribute(BaseModel):
    relationship: RelationshipSchema
    attribute_name: str
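
Note (illustrative, with hypothetical kind and value): the new models above carry values already resolved from the node being checked, so one `NodeUniquenessQueryRequestValued` can be built per uniqueness constraint. A minimal construction sketch using only the attribute-path variant (a `QueryRelationshipPathValued` would additionally need a full `RelationshipSchema` instance):

    from infrahub.core.validators.uniqueness.model import (
        NodeUniquenessQueryRequestValued,
        QueryAttributePathValued,
    )

    # "TestDevice" and "atl1-edge1" are made-up example values.
    request = NodeUniquenessQueryRequestValued(
        kind="TestDevice",
        unique_valued_paths=[
            QueryAttributePathValued(attribute_name="name", value="atl1-edge1"),
        ],
    )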