infrahub-server 1.3.0b5__py3-none-any.whl → 1.3.0b6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. infrahub/actions/constants.py +36 -79
  2. infrahub/actions/schema.py +2 -0
  3. infrahub/core/constraint/node/runner.py +3 -1
  4. infrahub/core/convert_object_type/conversion.py +2 -0
  5. infrahub/core/diff/query/delete_query.py +8 -4
  6. infrahub/core/diff/repository/repository.py +4 -0
  7. infrahub/core/migrations/graph/m015_diff_format_update.py +1 -2
  8. infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +1 -2
  9. infrahub/core/migrations/graph/m028_delete_diffs.py +1 -2
  10. infrahub/core/node/__init__.py +65 -36
  11. infrahub/core/path.py +14 -0
  12. infrahub/core/relationship/constraints/count.py +10 -9
  13. infrahub/core/relationship/constraints/interface.py +2 -1
  14. infrahub/core/relationship/constraints/peer_kind.py +2 -1
  15. infrahub/core/relationship/constraints/peer_parent.py +56 -0
  16. infrahub/core/relationship/constraints/peer_relatives.py +1 -1
  17. infrahub/core/relationship/constraints/profiles_kind.py +1 -1
  18. infrahub/core/schema/definitions/internal.py +8 -1
  19. infrahub/core/schema/generated/relationship_schema.py +6 -1
  20. infrahub/core/schema/schema_branch.py +36 -8
  21. infrahub/core/validators/__init__.py +2 -1
  22. infrahub/core/validators/relationship/peer.py +174 -4
  23. infrahub/database/__init__.py +0 -1
  24. infrahub/dependencies/builder/constraint/grouped/node_runner.py +2 -0
  25. infrahub/dependencies/builder/constraint/relationship_manager/peer_parent.py +8 -0
  26. infrahub/dependencies/builder/constraint/schema/aggregated.py +2 -0
  27. infrahub/dependencies/builder/constraint/schema/relationship_peer.py +8 -0
  28. infrahub/dependencies/registry.py +2 -0
  29. infrahub/git/tasks.py +1 -0
  30. infrahub/graphql/mutations/convert_object_type.py +16 -7
  31. infrahub/graphql/mutations/relationship.py +32 -0
  32. infrahub/graphql/queries/convert_object_type_mapping.py +3 -5
  33. infrahub/message_bus/operations/refresh/registry.py +3 -6
  34. infrahub/pools/models.py +14 -0
  35. infrahub/pools/tasks.py +71 -1
  36. infrahub_sdk/ctl/generator.py +4 -4
  37. infrahub_sdk/ctl/repository.py +1 -1
  38. infrahub_sdk/node/node.py +146 -92
  39. infrahub_sdk/pytest_plugin/items/python_transform.py +2 -1
  40. infrahub_sdk/query_groups.py +4 -3
  41. infrahub_sdk/utils.py +7 -20
  42. infrahub_sdk/yaml.py +6 -5
  43. {infrahub_server-1.3.0b5.dist-info → infrahub_server-1.3.0b6.dist-info}/METADATA +2 -2
  44. {infrahub_server-1.3.0b5.dist-info → infrahub_server-1.3.0b6.dist-info}/RECORD +47 -43
  45. {infrahub_server-1.3.0b5.dist-info → infrahub_server-1.3.0b6.dist-info}/LICENSE.txt +0 -0
  46. {infrahub_server-1.3.0b5.dist-info → infrahub_server-1.3.0b6.dist-info}/WHEEL +0 -0
  47. {infrahub_server-1.3.0b5.dist-info → infrahub_server-1.3.0b6.dist-info}/entry_points.txt +0 -0
infrahub/core/schema/generated/relationship_schema.py CHANGED
@@ -73,10 +73,15 @@ class GeneratedRelationshipSchema(HashableModel):
         description="Defines the maximum objects allowed on the other side of the relationship.",
         json_schema_extra={"update": "validate_constraint"},
     )
+    common_parent: str | None = Field(
+        default=None,
+        description="Name of a parent relationship on the peer schema that must share the same related object with the object's parent.",
+        json_schema_extra={"update": "validate_constraint"},
+    )
     common_relatives: list[str] | None = Field(
         default=None,
         description="List of relationship names on the peer schema for which all objects must share the same set of peers.",
-        json_schema_extra={"update": "validate_constraint"},
+        json_schema_extra={"update": "allowed"},
     )
     order_weight: int | None = Field(
         default=None,
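The new `common_parent` field lets a relationship require that its peers share the same parent object as the node itself. A hypothetical schema fragment (dict form, with illustrative kinds and relationship names that are not taken from this diff) shows where the key would sit:

# Hypothetical schema fragment; kinds, names and values are examples only.
interface_node = {
    "name": "Interface",
    "namespace": "Infra",
    "relationships": [
        # The node's own parent relationship.
        {"name": "device", "peer": "InfraDevice", "kind": "Parent", "cardinality": "one"},
        # Peers of `lag` must have the same `device` parent as this interface,
        # i.e. an interface can only join a LAG defined on the same device.
        {
            "name": "lag",
            "peer": "InfraLag",
            "cardinality": "one",
            "common_parent": "device",
        },
    ],
}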
infrahub/core/schema/schema_branch.py CHANGED
@@ -975,6 +975,28 @@ class SchemaBranch:
                ):
                    raise ValueError(f"{node.kind}: {rel.name} isn't allowed as a relationship name.")

+    def _validate_common_parent(self, node: NodeSchema, rel: RelationshipSchema) -> None:
+        if not rel.common_parent:
+            return
+
+        peer_schema = self.get(name=rel.peer, duplicate=False)
+        if not node.has_parent_relationship:
+            raise ValueError(
+                f"{node.kind}: Relationship {rel.name!r} defines 'common_parent' but node does not have a parent relationship"
+            )
+
+        try:
+            parent_rel = peer_schema.get_relationship(name=rel.common_parent)
+        except ValueError as exc:
+            raise ValueError(
+                f"{node.kind}: Relationship {rel.name!r} defines 'common_parent' but '{rel.peer}.{rel.common_parent}' does not exist"
+            ) from exc
+
+        if parent_rel.kind != RelationshipKind.PARENT:
+            raise ValueError(
+                f"{node.kind}: Relationship {rel.name!r} defines 'common_parent' but '{rel.peer}.{rel.common_parent}' is not of kind 'parent'"
+            )
+
     def validate_kinds(self) -> None:
         for name in list(self.nodes.keys()):
             node = self.get_node(name=name, duplicate=False)
@@ -997,6 +1019,9 @@ class SchemaBranch:
                        raise ValueError(
                            f"{node.kind}: Relationship {rel.name!r} is referring an invalid peer {rel.peer!r}"
                        ) from None
+
+                self._validate_common_parent(node=node, rel=rel)
+
                if rel.common_relatives:
                    peer_schema = self.get(name=rel.peer, duplicate=False)
                    for common_relatives_rel_name in rel.common_relatives:
@@ -1019,11 +1044,7 @@
        for name in self.nodes.keys():
            node_schema = self.get_node(name=name, duplicate=False)
            for attribute in node_schema.attributes:
-                if (
-                    attribute.kind == "NumberPool"
-                    and isinstance(attribute.parameters, NumberPoolParameters)
-                    and not attribute.parameters.number_pool_id
-                ):
+                if attribute.kind == "NumberPool" and isinstance(attribute.parameters, NumberPoolParameters):
                    self._validate_number_pool_parameters(
                        node_schema=node_schema, attribute=attribute, number_pool_parameters=attribute.parameters
                    )
@@ -1039,7 +1060,7 @@
                f"{node_schema.kind}.{attribute.name} is a NumberPool it has to be a read_only attribute"
            )

-        if attribute.inherited:
+        if attribute.inherited and not number_pool_parameters.number_pool_id:
            generics_with_attribute = []
            for generic_name in node_schema.inherit_from:
                generic_schema = self.get_generic(name=generic_name, duplicate=False)
@@ -1053,9 +1074,16 @@
                raise ValidationError(
                    f"{node_schema.kind}.{attribute.name} is a NumberPool inherited from more than one generic"
                )
+        elif not attribute.inherited:
+            for generic_name in node_schema.inherit_from:
+                generic_schema = self.get_generic(name=generic_name, duplicate=False)
+                if attribute.name in generic_schema.attribute_names:
+                    raise ValidationError(
+                        f"Overriding '{node_schema.kind}.{attribute.name}' NumberPool attribute from generic '{generic_name}' is not supported"
+                    )

-        else:
-            number_pool_parameters.number_pool_id = str(uuid4())
+        if not number_pool_parameters.number_pool_id:
+            number_pool_parameters.number_pool_id = str(uuid4())

    def validate_computed_attributes(self) -> None:
        self.computed_attributes = ComputedAttributes()
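Taken together, the `SchemaBranch` changes above now validate every NumberPool attribute (not only the ones still missing a `number_pool_id`), reject overriding a generic's NumberPool attribute from a node, and only generate a new pool id when none is set yet. A minimal standalone sketch of that last rule, using a stand-in dataclass rather than the real `NumberPoolParameters`:

# Standalone sketch (not the Infrahub API): an id is only generated when the
# parameters do not already carry one, so pools keep a stable identity across
# schema reloads.
from __future__ import annotations

from dataclasses import dataclass
from uuid import uuid4


@dataclass
class FakeNumberPoolParameters:
    start_range: int
    end_range: int
    number_pool_id: str | None = None


def ensure_number_pool_id(params: FakeNumberPoolParameters) -> str:
    if not params.number_pool_id:
        params.number_pool_id = str(uuid4())
    return params.number_pool_id


params = FakeNumberPoolParameters(start_range=1, end_range=100)
first = ensure_number_pool_id(params)
assert ensure_number_pool_id(params) == first  # idempotent on later validations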
infrahub/core/validators/__init__.py CHANGED
@@ -17,7 +17,7 @@ from .node.inherit_from import NodeInheritFromChecker
 from .node.relationship import NodeRelationshipAddChecker
 from .relationship.count import RelationshipCountChecker
 from .relationship.optional import RelationshipOptionalChecker
-from .relationship.peer import RelationshipPeerChecker
+from .relationship.peer import RelationshipPeerChecker, RelationshipPeerParentChecker
 from .uniqueness.checker import UniquenessChecker

 CONSTRAINT_VALIDATOR_MAP: dict[str, type[ConstraintCheckerInterface] | None] = {
@@ -42,6 +42,7 @@ CONSTRAINT_VALIDATOR_MAP: dict[str, type[ConstraintCheckerInterface] | None] = {
     "relationship.optional.update": RelationshipOptionalChecker,
     "relationship.min_count.update": RelationshipCountChecker,
     "relationship.max_count.update": RelationshipCountChecker,
+    "relationship.common_parent.update": RelationshipPeerParentChecker,
     "node.inherit_from.update": NodeInheritFromChecker,
     "node.uniqueness_constraints.update": UniquenessChecker,
     "node.parent.update": NodeHierarchyChecker,
infrahub/core/validators/relationship/peer.py CHANGED
@@ -2,17 +2,17 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, Any

-from infrahub.core.constants import PathType
+from infrahub import config
+from infrahub.core.constants import PathType, RelationshipKind
 from infrahub.core.path import DataPath, GroupedDataPaths
 from infrahub.core.schema import GenericSchema

 from ..interface import ConstraintCheckerInterface
-from ..shared import (
-    RelationshipSchemaValidatorQuery,
-)
+from ..shared import RelationshipSchemaValidatorQuery

 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
+    from infrahub.core.schema.relationship_schema import RelationshipSchema
     from infrahub.database import InfrahubDatabase

     from ..model import SchemaConstraintValidatorRequest
@@ -125,3 +125,173 @@ class RelationshipPeerChecker(ConstraintCheckerInterface):
             await query.execute(db=self.db)
             grouped_data_paths_list.append(await query.get_paths())
         return grouped_data_paths_list
+
+
+class RelationshipPeerParentValidatorQuery(RelationshipSchemaValidatorQuery):
+    name = "relationship_constraints_peer_parent_validator"
+
+    def __init__(
+        self,
+        relationship: RelationshipSchema,
+        parent_relationship: RelationshipSchema,
+        peer_parent_relationship: RelationshipSchema,
+        **kwargs: Any,
+    ):
+        super().__init__(**kwargs)
+
+        self.relationship = relationship
+        self.parent_relationship = parent_relationship
+        self.peer_parent_relationship = peer_parent_relationship
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False)
+        self.params.update(branch_params)
+        self.params["peer_relationship_id"] = self.relationship.identifier
+        self.params["parent_relationship_id"] = self.parent_relationship.identifier
+        self.params["peer_parent_relationship_id"] = self.peer_parent_relationship.identifier
+
+        parent_arrows = self.parent_relationship.get_query_arrows()
+        parent_match = (
+            "MATCH (active_node)%(lstart)s[r1:IS_RELATED]%(lend)s"
+            "(rel:Relationship { name: $parent_relationship_id })%(rstart)s[r2:IS_RELATED]%(rend)s(parent:Node)"
+        ) % {
+            "lstart": parent_arrows.left.start,
+            "lend": parent_arrows.left.end,
+            "rstart": parent_arrows.right.start,
+            "rend": parent_arrows.right.end,
+        }
+
+        peer_parent_arrows = self.relationship.get_query_arrows()
+        peer_match = (
+            "MATCH (active_node)%(lstart)s[r1:IS_RELATED]%(lend)s"
+            "(r:Relationship {name: $peer_relationship_id })%(rstart)s[r2:IS_RELATED]%(rend)s(peer:Node)"
+        ) % {
+            "lstart": peer_parent_arrows.left.start,
+            "lend": peer_parent_arrows.left.end,
+            "rstart": peer_parent_arrows.right.start,
+            "rend": peer_parent_arrows.right.end,
+        }
+
+        peer_parent_arrows = self.peer_parent_relationship.get_query_arrows()
+        peer_parent_match = (
+            "MATCH (peer:Node)%(lstart)s[r1:IS_RELATED]%(lend)s"
+            "(r:Relationship {name: $peer_parent_relationship_id})%(rstart)s[r2:IS_RELATED]%(rend)s(peer_parent:Node)"
+        ) % {
+            "lstart": peer_parent_arrows.left.start,
+            "lend": peer_parent_arrows.left.end,
+            "rstart": peer_parent_arrows.right.start,
+            "rend": peer_parent_arrows.right.end,
+        }
+
+        query = """
+        MATCH (n:%(node_kind)s)
+        CALL (n) {
+            MATCH path = (root:Root)<-[r:IS_PART_OF]-(n)
+            WHERE %(branch_filter)s
+            RETURN n as active_node, r.status = "active" AS is_active
+            ORDER BY r.branch_level DESC, r.from DESC
+            LIMIT 1
+        }
+        WITH active_node, is_active
+        WHERE is_active = TRUE
+        %(parent_match)s
+        WHERE all(r in [r1, r2] WHERE %(branch_filter)s AND r.status = "active")
+        CALL (active_node) {
+            %(peer_match)s
+            WITH DISTINCT active_node, peer
+            %(peer_match)s
+            WHERE all(r in [r1, r2] WHERE %(branch_filter)s)
+            WITH peer, r1.status = "active" AND r2.status = "active" AS is_active
+            ORDER BY peer.uuid, r1.branch_level DESC, r2.branch_level DESC, r1.from DESC, r2.from DESC, is_active DESC
+            WITH peer, head(collect(is_active)) AS is_active
+            WHERE is_active = TRUE
+            RETURN peer
+        }
+        CALL (peer) {
+            %(peer_parent_match)s
+            WHERE all(r IN [r1, r2] WHERE %(branch_filter)s)
+            WITH peer_parent, r1, r2, r1.status = "active" AND r2.status = "active" AS is_active
+            WITH peer_parent, r1.branch AS branch_name, is_active
+            ORDER BY r1.branch_level DESC, r2.branch_level DESC, r1.from DESC, r2.from DESC, is_active DESC
+            LIMIT 1
+            WITH peer_parent, branch_name
+            WHERE is_active = TRUE
+            RETURN peer_parent, branch_name
+        }
+        WITH DISTINCT active_node, parent, peer, peer_parent, branch_name
+        WHERE parent.uuid <> peer_parent.uuid
+        """ % {
+            "branch_filter": branch_filter,
+            "node_kind": self.node_schema.kind,
+            "parent_match": parent_match,
+            "peer_match": peer_match,
+            "peer_parent_match": peer_parent_match,
+        }
+
+        self.add_to_query(query)
+        self.return_labels = ["active_node.uuid", "parent.uuid", "peer.uuid", "peer_parent.uuid", "branch_name"]
+
+    async def get_paths(self) -> GroupedDataPaths:
+        grouped_data_paths = GroupedDataPaths()
+
+        for result in self.results:
+            grouped_data_paths.add_data_path(
+                DataPath(
+                    branch=str(result.get("branch_name")),
+                    path_type=PathType.RELATIONSHIP_ONE,
+                    node_id=str(result.get("peer.uuid")),
+                    field_name=self.peer_parent_relationship.name,
+                    peer_id=str(result.get("peer_parent.uuid")),
+                    kind=self.relationship.peer,
+                )
+            )
+
+        return grouped_data_paths
+
+
+class RelationshipPeerParentChecker(ConstraintCheckerInterface):
+    query_classes = [RelationshipPeerParentValidatorQuery]
+
+    def __init__(self, db: InfrahubDatabase, branch: Branch | None = None) -> None:
+        self.db = db
+        self.branch = branch
+
+    @property
+    def name(self) -> str:
+        return "relationship.common_parent.update"
+
+    def supports(self, request: SchemaConstraintValidatorRequest) -> bool:
+        return request.constraint_name == self.name and config.SETTINGS.main.schema_strict_mode
+
+    async def check(self, request: SchemaConstraintValidatorRequest) -> list[GroupedDataPaths]:
+        grouped_data_paths_list: list[GroupedDataPaths] = []
+
+        if not request.schema_path.field_name:
+            return grouped_data_paths_list
+
+        relationship = request.node_schema.get_relationship(name=request.schema_path.field_name)
+        if not relationship.common_parent:
+            # Should not happen if schema validation was done properly
+            return grouped_data_paths_list
+
+        parent_relationship = next(
+            iter(request.node_schema.get_relationships_of_kind(relationship_kinds=[RelationshipKind.PARENT]))
+        )
+        peer_parent_relationship = request.schema_branch.get(name=relationship.peer, duplicate=False).get_relationship(
+            name=relationship.common_parent
+        )
+
+        for query_class in self.query_classes:
+            query = await query_class.init(
+                db=self.db,
+                branch=self.branch,
+                node_schema=request.node_schema,
+                schema_path=request.schema_path,
+                relationship=relationship,
+                parent_relationship=parent_relationship,
+                peer_parent_relationship=peer_parent_relationship,
+            )
+            await query.execute(db=self.db)
+            grouped_data_paths_list.append(await query.get_paths())
+
+        return grouped_data_paths_list
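The new checker only runs for `relationship.common_parent.update` requests and, per its `supports()` method, only when schema strict mode is enabled; the Cypher query then reports peers whose own parent differs from the node's parent. A rough standalone sketch of the condition it flags, over plain in-memory records rather than the graph:

# Standalone illustration of the violation the query above reports: for each
# (node, peer) pair on the constrained relationship, the peer's parent must be
# the same object as the node's parent. The data below is made up.
node_parent = {"iface-1": "device-a", "iface-2": "device-a"}
peer_parent = {"lag-1": "device-a", "lag-2": "device-b"}
relationship_pairs = [("iface-1", "lag-1"), ("iface-2", "lag-2")]

violations = [
    (node_id, peer_id)
    for node_id, peer_id in relationship_pairs
    if node_parent[node_id] != peer_parent[peer_id]
]
print(violations)  # [('iface-2', 'lag-2')] -> would surface as a DataPath on the peer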
infrahub/database/__init__.py CHANGED
@@ -498,7 +498,6 @@ async def get_db(retry: int = 0) -> AsyncDriver:
        trusted_certificates=trusted_certificates,
        notifications_disabled_categories=[
            NotificationDisabledCategory.UNRECOGNIZED,
-            NotificationDisabledCategory.DEPRECATION,  # TODO: Remove me with 1.3
        ],
        notifications_min_severity=NotificationMinimumSeverity.WARNING,
    )
infrahub/dependencies/builder/constraint/grouped/node_runner.py CHANGED
@@ -4,6 +4,7 @@ from infrahub.dependencies.interface import DependencyBuilder, DependencyBuilderContext
 from ..node.grouped_uniqueness import NodeGroupedUniquenessConstraintDependency
 from ..relationship_manager.count import RelationshipCountConstraintDependency
 from ..relationship_manager.peer_kind import RelationshipPeerKindConstraintDependency
+from ..relationship_manager.peer_parent import RelationshipPeerParentConstraintDependency
 from ..relationship_manager.peer_relatives import RelationshipPeerRelativesConstraintDependency
 from ..relationship_manager.profiles_kind import RelationshipProfilesKindConstraintDependency

@@ -19,6 +20,7 @@ class NodeConstraintRunnerDependency(DependencyBuilder[NodeConstraintRunner]):
                 RelationshipPeerKindConstraintDependency.build(context=context),
                 RelationshipCountConstraintDependency.build(context=context),
                 RelationshipProfilesKindConstraintDependency.build(context=context),
+                RelationshipPeerParentConstraintDependency.build(context=context),
                 RelationshipPeerRelativesConstraintDependency.build(context=context),
             ],
         )
infrahub/dependencies/builder/constraint/relationship_manager/peer_parent.py ADDED
@@ -0,0 +1,8 @@
+from infrahub.core.relationship.constraints.peer_parent import RelationshipPeerParentConstraint
+from infrahub.dependencies.interface import DependencyBuilder, DependencyBuilderContext
+
+
+class RelationshipPeerParentConstraintDependency(DependencyBuilder[RelationshipPeerParentConstraint]):
+    @classmethod
+    def build(cls, context: DependencyBuilderContext) -> RelationshipPeerParentConstraint:
+        return RelationshipPeerParentConstraint(db=context.db, branch=context.branch)
infrahub/dependencies/builder/constraint/schema/aggregated.py CHANGED
@@ -14,6 +14,7 @@ from .node_attribute import SchemaNodeAttributeAddConstraintDependency
 from .node_relationship import SchemaNodeRelationshipAddConstraintDependency
 from .relationship_count import SchemaRelationshipCountConstraintDependency
 from .relationship_optional import SchemaRelationshipOptionalConstraintDependency
+from .relationship_peer import SchemaRelationshipPeerParentConstraintDependency
 from .uniqueness import SchemaUniquenessConstraintDependency


@@ -36,6 +37,7 @@ class AggregatedSchemaConstraintsDependency(DependencyBuilder[AggregatedConstrai
                 SchemaAttributeKindConstraintDependency.build(context=context),
                 SchemaNodeAttributeAddConstraintDependency.build(context=context),
                 SchemaNodeRelationshipAddConstraintDependency.build(context=context),
+                SchemaRelationshipPeerParentConstraintDependency.build(context=context),
             ],
             db=context.db,
             branch=context.branch,
infrahub/dependencies/builder/constraint/schema/relationship_peer.py ADDED
@@ -0,0 +1,8 @@
+from infrahub.core.validators.relationship.peer import RelationshipPeerParentChecker
+from infrahub.dependencies.interface import DependencyBuilder, DependencyBuilderContext
+
+
+class SchemaRelationshipPeerParentConstraintDependency(DependencyBuilder[RelationshipPeerParentChecker]):
+    @classmethod
+    def build(cls, context: DependencyBuilderContext) -> RelationshipPeerParentChecker:
+        return RelationshipPeerParentChecker(db=context.db, branch=context.branch)
infrahub/dependencies/registry.py CHANGED
@@ -3,6 +3,7 @@ from .builder.constraint.node.grouped_uniqueness import NodeGroupedUniquenessConstraintDependency
 from .builder.constraint.node.uniqueness import NodeAttributeUniquenessConstraintDependency
 from .builder.constraint.relationship_manager.count import RelationshipCountConstraintDependency
 from .builder.constraint.relationship_manager.peer_kind import RelationshipPeerKindConstraintDependency
+from .builder.constraint.relationship_manager.peer_parent import RelationshipPeerParentConstraintDependency
 from .builder.constraint.relationship_manager.peer_relatives import RelationshipPeerRelativesConstraintDependency
 from .builder.constraint.relationship_manager.profiles_kind import RelationshipProfilesKindConstraintDependency
 from .builder.constraint.schema.aggregated import AggregatedSchemaConstraintsDependency
@@ -38,6 +39,7 @@ def build_component_registry() -> ComponentDependencyRegistry:
     component_registry.track_dependency(RelationshipCountConstraintDependency)
     component_registry.track_dependency(RelationshipProfilesKindConstraintDependency)
     component_registry.track_dependency(RelationshipPeerKindConstraintDependency)
+    component_registry.track_dependency(RelationshipPeerParentConstraintDependency)
     component_registry.track_dependency(RelationshipPeerRelativesConstraintDependency)
     component_registry.track_dependency(NodeConstraintRunnerDependency)
     component_registry.track_dependency(NodeDeleteValidatorDependency)
infrahub/git/tasks.py CHANGED
@@ -696,6 +696,7 @@ async def trigger_internal_checks(
        if (
            existing_validator.typename == InfrahubKind.REPOSITORYVALIDATOR
            and existing_validator.repository.id == model.repository
+            and existing_validator.label.value == validator_name
        ):
            previous_validator = existing_validator

infrahub/graphql/mutations/convert_object_type.py CHANGED
@@ -6,6 +6,7 @@ from graphql import GraphQLResolveInfo

 from infrahub.core import registry
 from infrahub.core.convert_object_type.conversion import InputForDestField, convert_object_type
+from infrahub.core.convert_object_type.schema_mapping import get_schema_mapping
 from infrahub.core.manager import NodeManager

 if TYPE_CHECKING:
@@ -16,7 +17,6 @@ class ConvertObjectTypeInput(InputObjectType):
     node_id = String(required=True)
     target_kind = String(required=True)
     fields_mapping = GenericScalar(required=True)  # keys are destination attributes/relationships names.
-    branch = String(required=True)


 class ConvertObjectType(Mutation):
@@ -37,17 +37,26 @@ class ConvertObjectType(Mutation):

         graphql_context: GraphqlContext = info.context

+        node_to_convert = await NodeManager.get_one(
+            id=str(data.node_id), db=graphql_context.db, branch=graphql_context.branch
+        )
+
+        source_schema = registry.get_node_schema(name=node_to_convert.get_kind(), branch=graphql_context.branch)
+        target_schema = registry.get_node_schema(name=str(data.target_kind), branch=graphql_context.branch)
+
         fields_mapping: dict[str, InputForDestField] = {}
         if not isinstance(data.fields_mapping, dict):
             raise ValueError(f"Expected `fields_mapping` to be a `dict`, got {type(fields_mapping)}")

-        for field, input_for_dest_field_str in data.fields_mapping.items():
-            fields_mapping[field] = InputForDestField(**input_for_dest_field_str)
+        for field_name, input_for_dest_field_str in data.fields_mapping.items():
+            fields_mapping[field_name] = InputForDestField(**input_for_dest_field_str)
+
+        # Complete fields mapping with auto-mapping.
+        mapping = get_schema_mapping(source_schema=source_schema, target_schema=target_schema)
+        for field_name, mapping_value in mapping.items():
+            if mapping_value.source_field_name is not None and field_name not in fields_mapping:
+                fields_mapping[field_name] = InputForDestField(source_field=mapping_value.source_field_name)

-        node_to_convert = await NodeManager.get_one(
-            id=str(data.node_id), db=graphql_context.db, branch=str(data.branch)
-        )
-        target_schema = registry.get_node_schema(name=str(data.target_kind), branch=data.branch)
         new_node = await convert_object_type(
             node=node_to_convert,
             target_schema=target_schema,
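With the `branch` argument removed, the mutation now resolves schemas from the GraphQL context and fills in any fields the caller did not map explicitly using the auto-mapping returned by `get_schema_mapping`. A simplified standalone sketch of that merge, where plain dicts stand in for `InputForDestField` and the mapping result (explicit entries win, auto-mapping only fills the gaps):

# Standalone sketch of the mapping-completion logic above; data is illustrative.
explicit_mapping = {"name": {"source_field": "hostname"}}
auto_mapping = {"name": "name", "description": "description", "status": None}

fields_mapping = dict(explicit_mapping)
for field_name, source_field_name in auto_mapping.items():
    if source_field_name is not None and field_name not in fields_mapping:
        fields_mapping[field_name] = {"source_field": source_field_name}

print(fields_mapping)
# {'name': {'source_field': 'hostname'}, 'description': {'source_field': 'description'}}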
infrahub/graphql/mutations/relationship.py CHANGED
@@ -85,6 +85,7 @@ class RelationshipAdd(Mutation):
         nodes = await _validate_peers(info=info, data=data)
         await _validate_permissions(info=info, source_node=source, peers=nodes)
         await _validate_peer_types(info=info, data=data, source_node=source, peers=nodes)
+        await _validate_peer_parents(info=info, data=data, source_node=source, peers=nodes)

         # This has to be done after validating the permissions
         await apply_external_context(graphql_context=graphql_context, context_input=context)
@@ -406,6 +407,37 @@ async def _validate_peer_types(
        )


+async def _validate_peer_parents(
+    info: GraphQLResolveInfo, data: RelationshipNodesInput, source_node: Node, peers: dict[str, Node]
+) -> None:
+    relationship_name = str(data.name)
+    rel_schema = source_node.get_schema().get_relationship(name=relationship_name)
+    if not rel_schema.common_parent:
+        return
+
+    graphql_context: GraphqlContext = info.context
+
+    source_node_parent = await source_node.get_parent_relationship_peer(
+        db=graphql_context.db, name=rel_schema.common_parent
+    )
+    if not source_node_parent:
+        # If the schema is properly validated we are not expecting this to happen
+        raise ValidationError(f"Node {source_node.id} ({source_node.get_kind()!r}) does not have a parent peer")
+
+    parents: set[str] = {source_node_parent.id}
+    for peer in peers.values():
+        peer_parent = await peer.get_parent_relationship_peer(db=graphql_context.db, name=rel_schema.common_parent)
+        if not peer_parent:
+            # If the schema is properly validated we are not expecting this to happen
+            raise ValidationError(f"Peer {peer.id} ({peer.get_kind()!r}) does not have a parent peer")
+        parents.add(peer_parent.id)
+
+    if len(parents) > 1:
+        raise ValidationError(
+            f"Cannot relate {source_node.id!r} to '{relationship_name}' peers that do not have the same parent"
+        )
+
+
 async def _collect_current_peers(
     info: GraphQLResolveInfo, data: RelationshipNodesInput, source_node: Node
 ) -> dict[str, RelationshipPeerData]:
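`_validate_peer_parents` enforces the same rule at mutation time: the source node's parent and every new peer's parent must resolve to a single object, otherwise the RelationshipAdd mutation is rejected. The core of the check reduces to collecting parent ids into a set, roughly:

# Standalone sketch of the mutation-time guard; ids are illustrative.
source_parent_id = "device-a"
new_peer_parent_ids = ["device-a", "device-b"]

parents = {source_parent_id, *new_peer_parent_ids}
if len(parents) > 1:
    raise ValueError("Cannot relate peers that do not have the same parent")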
infrahub/graphql/queries/convert_object_type_mapping.py CHANGED
@@ -12,13 +12,12 @@ class FieldsMapping(ObjectType):

 async def fields_mapping_type_conversion_resolver(
     root: dict,  # noqa: ARG001
-    info: GraphQLResolveInfo,  # noqa: ARG001
+    info: GraphQLResolveInfo,
     source_kind: str,
     target_kind: str,
-    branch: str,
 ) -> dict:
-    source_schema = registry.get_node_schema(name=source_kind, branch=branch)
-    target_schema = registry.get_node_schema(name=target_kind, branch=branch)
+    source_schema = registry.get_node_schema(name=source_kind, branch=info.context.branch)
+    target_schema = registry.get_node_schema(name=target_kind, branch=info.context.branch)

     mapping = get_schema_mapping(source_schema=source_schema, target_schema=target_schema)
     mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in mapping.items()}
@@ -29,7 +28,6 @@ FieldsMappingTypeConversion = Field(
     FieldsMapping,
     source_kind=String(),
     target_kind=String(),
-    branch=String(),
     description="Retrieve fields mapping for converting object type",
     resolver=fields_mapping_type_conversion_resolver,
     required=True,
infrahub/message_bus/operations/refresh/registry.py CHANGED
@@ -1,5 +1,3 @@
-from infrahub import lock
-from infrahub.core.registry import registry
 from infrahub.message_bus import messages
 from infrahub.services import InfrahubServices
 from infrahub.tasks.registry import refresh_branches
@@ -24,8 +22,7 @@ async def rebased_branch(message: messages.RefreshRegistryRebasedBranch, service
        )
        return

-    async with lock.registry.local_schema_lock():
-        service.log.info("Refreshing rebased branch")
+    async with service.database.start_session(read_only=True) as db:
+        await refresh_branches(db=db)

-        async with service.database.start_session(read_only=True) as db:
-            registry.branch[message.branch] = await registry.branch_object.get_by_name(name=message.branch, db=db)
+    await service.component.refresh_schema_hash()
infrahub/pools/models.py ADDED
@@ -0,0 +1,14 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class NumberPoolLockDefinition:
+    pool_id: str
+
+    @property
+    def lock_name(self) -> str:
+        return f"number-pool-creation-{self.pool_id}"
+
+    @property
+    def namespace_name(self) -> str:
+        return "number-pool"
infrahub/pools/tasks.py CHANGED
@@ -3,12 +3,16 @@ from __future__ import annotations
 from prefect import flow
 from prefect.logging import get_run_logger

+from infrahub import lock
 from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
-from infrahub.core.constants import NumberPoolType
+from infrahub.core.constants import InfrahubKind, NumberPoolType
 from infrahub.core.manager import NodeManager
+from infrahub.core.node import Node
 from infrahub.core.protocols import CoreNumberPool
 from infrahub.core.registry import registry
 from infrahub.core.schema.attribute_parameters import NumberPoolParameters
+from infrahub.exceptions import NodeNotFoundError
+from infrahub.pools.models import NumberPoolLockDefinition
 from infrahub.pools.registration import get_branches_with_schema_number_pool
 from infrahub.services import InfrahubServices  # noqa: TC001 needed for prefect flow

@@ -54,3 +58,69 @@ async def validate_schema_number_pools(
         elif not defined_on_branches:
             log.info(f"Deleting number pool (id={schema_number_pool.id}) as it is no longer defined in the schema")
             await schema_number_pool.delete(db=service.database)
+
+    existing_pool_ids = [pool.id for pool in schema_number_pools]
+    for registry_branch in registry.schema.get_branches():
+        schema_branch = service.database.schema.get_schema_branch(name=registry_branch)
+
+        for generic_name in schema_branch.generic_names:
+            generic_node = schema_branch.get_generic(name=generic_name, duplicate=False)
+            for attribute_name in generic_node.attribute_names:
+                attribute = generic_node.get_attribute(name=attribute_name)
+                if isinstance(attribute.parameters, NumberPoolParameters) and attribute.parameters.number_pool_id:
+                    if attribute.parameters.number_pool_id not in existing_pool_ids:
+                        await _create_number_pool(
+                            service=service,
+                            number_pool_id=attribute.parameters.number_pool_id,
+                            pool_node=generic_node.kind,
+                            pool_attribute=attribute_name,
+                            start_range=attribute.parameters.start_range,
+                            end_range=attribute.parameters.end_range,
+                        )
+                        existing_pool_ids.append(attribute.parameters.number_pool_id)
+
+        for node_name in schema_branch.node_names:
+            node = schema_branch.get_node(name=node_name, duplicate=False)
+            for attribute_name in node.attribute_names:
+                attribute = node.get_attribute(name=attribute_name)
+                if isinstance(attribute.parameters, NumberPoolParameters) and attribute.parameters.number_pool_id:
+                    if attribute.parameters.number_pool_id not in existing_pool_ids:
+                        await _create_number_pool(
+                            service=service,
+                            number_pool_id=attribute.parameters.number_pool_id,
+                            pool_node=node.kind,
+                            pool_attribute=attribute_name,
+                            start_range=attribute.parameters.start_range,
+                            end_range=attribute.parameters.end_range,
+                        )
+                        existing_pool_ids.append(attribute.parameters.number_pool_id)
+
+
+async def _create_number_pool(
+    service: InfrahubServices,
+    number_pool_id: str,
+    pool_node: str,
+    pool_attribute: str,
+    start_range: int,
+    end_range: int,
+) -> None:
+    lock_definition = NumberPoolLockDefinition(pool_id=number_pool_id)
+    async with lock.registry.get(name=lock_definition.lock_name, namespace=lock_definition.namespace_name, local=False):
+        async with service.database.start_session() as dbs:
+            try:
+                await registry.manager.get_one_by_id_or_default_filter(
+                    db=dbs, id=str(number_pool_id), kind=CoreNumberPool
+                )
+            except NodeNotFoundError:
+                number_pool = await Node.init(db=dbs, schema=InfrahubKind.NUMBERPOOL, branch=registry.default_branch)
+                await number_pool.new(
+                    db=dbs,
+                    id=number_pool_id,
+                    name=f"{pool_node}.{pool_attribute} [{number_pool_id}]",
+                    node=pool_node,
+                    node_attribute=pool_attribute,
+                    start_range=start_range,
+                    end_range=end_range,
+                    pool_type=NumberPoolType.SCHEMA.value,
+                )
+                await number_pool.save(db=dbs)
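`_create_number_pool` follows a lock-then-check-then-create pattern so that workers racing on the same schema-defined pool create it only once. A generic standalone sketch of that pattern, using asyncio primitives and a dict in place of Infrahub's distributed lock registry and database:

# Generic sketch of the create-if-missing-under-lock pattern used above;
# asyncio.Lock stands in for the distributed lock, a dict for the database.
import asyncio

pools: dict[str, dict] = {}
locks: dict[str, asyncio.Lock] = {}


async def ensure_pool(pool_id: str, start_range: int, end_range: int) -> dict:
    lock = locks.setdefault(pool_id, asyncio.Lock())
    async with lock:                      # serialize creators of the same pool
        if pool_id not in pools:          # re-check after acquiring the lock
            pools[pool_id] = {"id": pool_id, "start": start_range, "end": end_range}
        return pools[pool_id]


async def main() -> None:
    results = await asyncio.gather(*(ensure_pool("pool-1", 1, 100) for _ in range(5)))
    assert all(r is results[0] for r in results)  # created exactly once


asyncio.run(main())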