infrahub-server 1.1.1__py3-none-any.whl → 1.1.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. infrahub/api/__init__.py +13 -5
  2. infrahub/api/artifact.py +9 -15
  3. infrahub/api/auth.py +7 -1
  4. infrahub/api/dependencies.py +15 -2
  5. infrahub/api/diff/diff.py +13 -7
  6. infrahub/api/file.py +5 -10
  7. infrahub/api/internal.py +19 -6
  8. infrahub/api/menu.py +8 -6
  9. infrahub/api/oauth2.py +25 -10
  10. infrahub/api/oidc.py +26 -10
  11. infrahub/api/query.py +2 -2
  12. infrahub/api/schema.py +48 -59
  13. infrahub/api/storage.py +8 -8
  14. infrahub/api/transformation.py +6 -5
  15. infrahub/auth.py +1 -26
  16. infrahub/cli/__init__.py +1 -1
  17. infrahub/cli/context.py +5 -8
  18. infrahub/cli/db.py +6 -6
  19. infrahub/cli/git_agent.py +1 -1
  20. infrahub/computed_attribute/models.py +1 -1
  21. infrahub/computed_attribute/tasks.py +1 -1
  22. infrahub/config.py +5 -5
  23. infrahub/core/account.py +2 -10
  24. infrahub/core/attribute.py +22 -0
  25. infrahub/core/branch/models.py +1 -1
  26. infrahub/core/branch/tasks.py +4 -3
  27. infrahub/core/diff/calculator.py +14 -0
  28. infrahub/core/diff/combiner.py +6 -2
  29. infrahub/core/diff/conflicts_enricher.py +2 -2
  30. infrahub/core/diff/coordinator.py +296 -87
  31. infrahub/core/diff/data_check_synchronizer.py +33 -4
  32. infrahub/core/diff/enricher/cardinality_one.py +3 -3
  33. infrahub/core/diff/enricher/hierarchy.py +4 -1
  34. infrahub/core/diff/merger/merger.py +11 -1
  35. infrahub/core/diff/merger/serializer.py +5 -29
  36. infrahub/core/diff/model/path.py +88 -4
  37. infrahub/core/diff/query/field_specifiers.py +35 -0
  38. infrahub/core/diff/query/roots_metadata.py +48 -0
  39. infrahub/core/diff/query/save.py +1 -0
  40. infrahub/core/diff/query_parser.py +27 -11
  41. infrahub/core/diff/repository/deserializer.py +7 -3
  42. infrahub/core/diff/repository/repository.py +100 -9
  43. infrahub/core/diff/tasks.py +1 -1
  44. infrahub/core/graph/__init__.py +1 -1
  45. infrahub/core/integrity/object_conflict/conflict_recorder.py +6 -1
  46. infrahub/core/ipam/utilization.py +6 -1
  47. infrahub/core/manager.py +8 -0
  48. infrahub/core/merge.py +6 -1
  49. infrahub/core/migrations/graph/__init__.py +2 -0
  50. infrahub/core/migrations/graph/m014_remove_index_attr_value.py +1 -1
  51. infrahub/core/migrations/graph/m015_diff_format_update.py +1 -1
  52. infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +1 -1
  53. infrahub/core/migrations/graph/m018_uniqueness_nulls.py +101 -0
  54. infrahub/core/migrations/query/attribute_add.py +5 -5
  55. infrahub/core/migrations/schema/tasks.py +2 -2
  56. infrahub/core/migrations/shared.py +3 -3
  57. infrahub/core/node/__init__.py +8 -2
  58. infrahub/core/node/constraints/grouped_uniqueness.py +9 -2
  59. infrahub/core/query/__init__.py +5 -2
  60. infrahub/core/query/diff.py +32 -19
  61. infrahub/core/query/ipam.py +30 -22
  62. infrahub/core/query/node.py +91 -40
  63. infrahub/core/schema/generated/attribute_schema.py +2 -2
  64. infrahub/core/schema/generated/base_node_schema.py +2 -2
  65. infrahub/core/schema/generated/relationship_schema.py +1 -1
  66. infrahub/core/schema/schema_branch_computed.py +1 -1
  67. infrahub/core/task/task_log.py +1 -1
  68. infrahub/core/validators/attribute/kind.py +1 -1
  69. infrahub/core/validators/interface.py +1 -2
  70. infrahub/core/validators/models/violation.py +1 -14
  71. infrahub/core/validators/shared.py +2 -2
  72. infrahub/core/validators/tasks.py +7 -4
  73. infrahub/core/validators/uniqueness/index.py +2 -4
  74. infrahub/database/index.py +1 -1
  75. infrahub/dependencies/builder/constraint/schema/aggregated.py +2 -0
  76. infrahub/dependencies/builder/constraint/schema/attribute_kind.py +8 -0
  77. infrahub/dependencies/builder/diff/data_check_synchronizer.py +2 -0
  78. infrahub/git/base.py +3 -3
  79. infrahub/git/integrator.py +1 -1
  80. infrahub/graphql/api/endpoints.py +12 -3
  81. infrahub/graphql/app.py +2 -2
  82. infrahub/graphql/auth/query_permission_checker/default_branch_checker.py +2 -17
  83. infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py +1 -12
  84. infrahub/graphql/auth/query_permission_checker/object_permission_checker.py +6 -40
  85. infrahub/graphql/auth/query_permission_checker/super_admin_checker.py +5 -8
  86. infrahub/graphql/enums.py +2 -2
  87. infrahub/graphql/initialization.py +27 -8
  88. infrahub/graphql/manager.py +9 -3
  89. infrahub/graphql/models.py +6 -0
  90. infrahub/graphql/mutations/account.py +14 -10
  91. infrahub/graphql/mutations/computed_attribute.py +11 -22
  92. infrahub/graphql/mutations/diff.py +2 -0
  93. infrahub/graphql/mutations/main.py +5 -16
  94. infrahub/graphql/mutations/proposed_change.py +11 -20
  95. infrahub/graphql/mutations/resource_manager.py +6 -3
  96. infrahub/graphql/mutations/schema.py +8 -7
  97. infrahub/graphql/mutations/tasks.py +1 -1
  98. infrahub/graphql/permissions.py +3 -4
  99. infrahub/graphql/queries/account.py +2 -11
  100. infrahub/graphql/queries/resource_manager.py +21 -10
  101. infrahub/graphql/query.py +3 -1
  102. infrahub/graphql/resolvers/resolver.py +5 -1
  103. infrahub/graphql/types/task.py +14 -2
  104. infrahub/menu/generator.py +6 -18
  105. infrahub/message_bus/messages/event_node_mutated.py +2 -2
  106. infrahub/message_bus/operations/check/repository.py +2 -4
  107. infrahub/message_bus/operations/event/branch.py +2 -4
  108. infrahub/message_bus/operations/requests/proposed_change.py +1 -1
  109. infrahub/message_bus/operations/requests/repository.py +3 -5
  110. infrahub/message_bus/types.py +1 -1
  111. infrahub/permissions/__init__.py +12 -3
  112. infrahub/permissions/backend.py +2 -17
  113. infrahub/permissions/constants.py +12 -8
  114. infrahub/permissions/local_backend.py +5 -102
  115. infrahub/permissions/manager.py +135 -0
  116. infrahub/permissions/report.py +14 -25
  117. infrahub/permissions/types.py +6 -0
  118. infrahub/proposed_change/tasks.py +1 -1
  119. infrahub/task_manager/models.py +34 -5
  120. infrahub/task_manager/task.py +14 -6
  121. infrahub/visuals.py +1 -3
  122. infrahub_sdk/client.py +204 -43
  123. infrahub_sdk/ctl/cli_commands.py +106 -6
  124. infrahub_sdk/data.py +3 -2
  125. infrahub_sdk/graphql.py +5 -0
  126. infrahub_sdk/node.py +21 -2
  127. infrahub_sdk/queries.py +69 -0
  128. infrahub_sdk/schema/main.py +1 -0
  129. infrahub_sdk/testing/schemas/animal.py +1 -0
  130. infrahub_sdk/types.py +6 -0
  131. infrahub_sdk/utils.py +17 -0
  132. {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/METADATA +1 -1
  133. {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/RECORD +136 -131
  134. infrahub/core/diff/query/empty_roots.py +0 -33
  135. {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/LICENSE.txt +0 -0
  136. {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/WHEEL +0 -0
  137. {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/entry_points.txt +0 -0
infrahub/core/diff/repository/repository.py CHANGED
@@ -3,17 +3,22 @@ from typing import Generator
 from infrahub import config
 from infrahub.core import registry
 from infrahub.core.diff.query.field_summary import EnrichedDiffNodeFieldSummaryQuery
+from infrahub.core.query.diff import DiffCountChanges
 from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase, retry_db_transaction
 from infrahub.exceptions import ResourceNotFoundError
+from infrahub.log import get_logger
 
 from ..model.path import (
     ConflictSelection,
     EnrichedDiffConflict,
     EnrichedDiffRoot,
+    EnrichedDiffRootMetadata,
     EnrichedDiffs,
+    EnrichedDiffsMetadata,
     EnrichedNodeCreateRequest,
     NodeDiffFieldSummary,
+    NodeFieldSpecifier,
     TimeRange,
     TrackingId,
 )
@@ -21,15 +26,18 @@ from ..query.delete_query import EnrichedDiffDeleteQuery
 from ..query.diff_get import EnrichedDiffGetQuery
 from ..query.diff_summary import DiffSummaryCounters, DiffSummaryQuery
 from ..query.drop_tracking_id import EnrichedDiffDropTrackingIdQuery
-from ..query.empty_roots import EnrichedDiffEmptyRootsQuery
+from ..query.field_specifiers import EnrichedDiffFieldSpecifiersQuery
 from ..query.filters import EnrichedDiffQueryFilters
 from ..query.get_conflict_query import EnrichedDiffConflictQuery
 from ..query.has_conflicts_query import EnrichedDiffHasConflictQuery
+from ..query.roots_metadata import EnrichedDiffRootsMetadataQuery
 from ..query.save import EnrichedDiffRootsCreateQuery, EnrichedNodeBatchCreateQuery, EnrichedNodesLinkQuery
 from ..query.time_range_query import EnrichedDiffTimeRangeQuery
 from ..query.update_conflict_query import EnrichedDiffConflictUpdateQuery
 from .deserializer import EnrichedDiffDeserializer
 
+log = get_logger()
+
 
 class DiffRepository:
     MAX_SAVE_BATCH_SIZE: int = 100
@@ -53,7 +61,6 @@ class DiffRepository:
         include_empty: bool = False,
     ) -> list[EnrichedDiffRoot]:
         final_max_depth = config.SETTINGS.database.max_depth_search_hierarchy
-        final_limit = limit or config.SETTINGS.database.query_size_limit
         query = await EnrichedDiffGetQuery.init(
             db=self.db,
             base_branch_name=base_branch_name,
@@ -62,7 +69,7 @@
             to_time=to_time,
             filters=EnrichedDiffQueryFilters(**dict(filters or {})),
             max_depth=final_max_depth,
-            limit=final_limit,
+            limit=limit,
             offset=offset,
             tracking_id=tracking_id,
             diff_ids=diff_ids,
@@ -118,6 +125,22 @@
             for dbr in diff_branch_roots
         ]
 
+    async def hydrate_diff_pair(self, enriched_diffs_metadata: EnrichedDiffsMetadata) -> EnrichedDiffs:
+        hydrated_base_diff = await self.get_one(
+            diff_branch_name=enriched_diffs_metadata.base_branch_name,
+            diff_id=enriched_diffs_metadata.base_branch_diff.uuid,
+        )
+        hydrated_branch_diff = await self.get_one(
+            diff_branch_name=enriched_diffs_metadata.diff_branch_name,
+            diff_id=enriched_diffs_metadata.diff_branch_diff.uuid,
+        )
+        return EnrichedDiffs(
+            base_branch_name=enriched_diffs_metadata.base_branch_name,
+            diff_branch_name=enriched_diffs_metadata.diff_branch_name,
+            base_branch_diff=hydrated_base_diff,
+            diff_branch_diff=hydrated_branch_diff,
+        )
+
     async def get_one(
         self,
         diff_branch_name: str,
@@ -161,6 +184,8 @@
 
     @retry_db_transaction(name="enriched_diff_save")
     async def save(self, enriched_diffs: EnrichedDiffs) -> None:
+        num_nodes = len(enriched_diffs.base_branch_diff.nodes) + len(enriched_diffs.diff_branch_diff.nodes)
+        log.info(f"Saving diff (num_nodes={num_nodes})...")
         root_query = await EnrichedDiffRootsCreateQuery.init(db=self.db, enriched_diffs=enriched_diffs)
         await root_query.execute(db=self.db)
         for node_create_batch in self._get_node_create_request_batch(enriched_diffs=enriched_diffs):
@@ -168,6 +193,7 @@
             await node_query.execute(db=self.db)
         link_query = await EnrichedNodesLinkQuery.init(db=self.db, enriched_diffs=enriched_diffs)
         await link_query.execute(db=self.db)
+        log.info("Diff saved.")
 
     async def summary(
         self,
@@ -211,18 +237,55 @@
         await query.execute(db=self.db)
         return await query.get_time_ranges()
 
-    async def get_empty_roots(
+    async def get_diff_pairs_metadata(
         self,
         diff_branch_names: list[str] | None = None,
         base_branch_names: list[str] | None = None,
-    ) -> list[EnrichedDiffRoot]:
-        query = await EnrichedDiffEmptyRootsQuery.init(
-            db=self.db, diff_branch_names=diff_branch_names, base_branch_names=base_branch_names
+        from_time: Timestamp | None = None,
+        to_time: Timestamp | None = None,
+    ) -> list[EnrichedDiffsMetadata]:
+        if diff_branch_names and base_branch_names:
+            diff_branch_names += base_branch_names
+        empty_roots = await self.get_roots_metadata(
+            diff_branch_names=diff_branch_names,
+            base_branch_names=base_branch_names,
+            from_time=from_time,
+            to_time=to_time,
+        )
+        roots_by_id = {root.uuid: root for root in empty_roots}
+        pairs: list[EnrichedDiffsMetadata] = []
+        for branch_root in empty_roots:
+            if branch_root.base_branch_name == branch_root.diff_branch_name:
+                continue
+            base_root = roots_by_id[branch_root.partner_uuid]
+            pairs.append(
+                EnrichedDiffsMetadata(
+                    base_branch_name=branch_root.base_branch_name,
+                    diff_branch_name=branch_root.diff_branch_name,
+                    base_branch_diff=base_root,
+                    diff_branch_diff=branch_root,
+                )
+            )
+        return pairs
+
+    async def get_roots_metadata(
+        self,
+        diff_branch_names: list[str] | None = None,
+        base_branch_names: list[str] | None = None,
+        from_time: Timestamp | None = None,
+        to_time: Timestamp | None = None,
+    ) -> list[EnrichedDiffRootMetadata]:
+        query = await EnrichedDiffRootsMetadataQuery.init(
+            db=self.db,
+            diff_branch_names=diff_branch_names,
+            base_branch_names=base_branch_names,
+            from_time=from_time,
+            to_time=to_time,
         )
         await query.execute(db=self.db)
         diff_roots = []
-        for neo4j_node in query.get_empty_root_nodes():
-            diff_roots.append(self.deserializer.build_diff_root(root_node=neo4j_node))
+        for neo4j_node in query.get_root_nodes_metadata():
+            diff_roots.append(self.deserializer.build_diff_root_metadata(root_node=neo4j_node))
         return diff_roots
 
     async def diff_has_conflicts(
@@ -267,3 +330,31 @@
     async def drop_tracking_ids(self, tracking_ids: list[TrackingId]) -> None:
         query = await EnrichedDiffDropTrackingIdQuery.init(db=self.db, tracking_ids=tracking_ids)
         await query.execute(db=self.db)
+
+    async def get_num_changes_in_time_range_by_branch(
+        self, branch_names: list[str], from_time: Timestamp, to_time: Timestamp
+    ) -> dict[str, int]:
+        query = await DiffCountChanges.init(db=self.db, branch_names=branch_names, diff_from=from_time, diff_to=to_time)
+        await query.execute(db=self.db)
+        return query.get_num_changes_by_branch()
+
+    async def get_node_field_specifiers(self, diff_id: str) -> set[NodeFieldSpecifier]:
+        limit = config.SETTINGS.database.query_size_limit
+        offset = 0
+        specifiers: set[NodeFieldSpecifier] = set()
+        while True:
+            query = await EnrichedDiffFieldSpecifiersQuery.init(db=self.db, diff_id=diff_id, offset=offset, limit=limit)
+            await query.execute(db=self.db)
+
+            new_specifiers = {
+                NodeFieldSpecifier(
+                    node_uuid=field_specifier_tuple[0],
+                    field_name=field_specifier_tuple[1],
+                )
+                for field_specifier_tuple in query.get_node_field_specifier_tuples()
+            }
+            if not new_specifiers:
+                break
+            specifiers |= new_specifiers
+            offset += limit
+        return specifiers
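The new repository methods above split diff retrieval into a cheap metadata pass (get_diff_pairs_metadata / get_roots_metadata) and an explicit hydration step (hydrate_diff_pair). A minimal usage sketch follows; the get_repository() helper, function name, and branch name are illustrative assumptions, not part of this diff:

# Hypothetical usage sketch only: get_repository() stands in for however the
# application obtains a DiffRepository (e.g. via the component registry).
async def show_diff_pairs(branch_name: str) -> None:
    repo = await get_repository()  # assumed helper, not part of the package
    # Phase 1: lightweight metadata, no diff nodes are loaded yet.
    pairs = await repo.get_diff_pairs_metadata(diff_branch_names=[branch_name])
    for pair in pairs:
        # Phase 2: hydrate a single pair into a full EnrichedDiffs object on demand.
        enriched = await repo.hydrate_diff_pair(enriched_diffs_metadata=pair)
        print(enriched.base_branch_name, enriched.diff_branch_name)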
infrahub/core/diff/tasks.py CHANGED
@@ -57,7 +57,7 @@ async def refresh_diff_all(branch_name: str) -> None:
     component_registry = get_component_registry()
     default_branch = registry.get_branch_from_registry()
     diff_repository = await component_registry.get_component(DiffRepository, db=db, branch=default_branch)
-    diff_roots_to_refresh = await diff_repository.get_empty_roots(diff_branch_names=[branch_name])
+    diff_roots_to_refresh = await diff_repository.get_roots_metadata(diff_branch_names=[branch_name])
 
     for diff_root in diff_roots_to_refresh:
         if diff_root.base_branch_name != diff_root.diff_branch_name:
infrahub/core/graph/__init__.py CHANGED
@@ -1 +1 @@
-GRAPH_VERSION = 17
+GRAPH_VERSION = 18
infrahub/core/integrity/object_conflict/conflict_recorder.py CHANGED
@@ -94,13 +94,18 @@ class ObjectConflictValidatorRecorder:
         await self.finalize_validator(validator, is_success)
         return current_checks
 
-    async def get_or_create_validator(self, proposed_change: CoreProposedChange) -> Node:
+    async def get_validator(self, proposed_change: CoreProposedChange) -> Node | None:
         validations = await proposed_change.validations.get_peers(db=self.db, branch_agnostic=True)
 
         for validation in validations.values():
            if validation.get_kind() == self.validator_kind:
                return validation
+        return None
 
+    async def get_or_create_validator(self, proposed_change: CoreProposedChange) -> Node:
+        validator_obj = await self.get_validator(proposed_change=proposed_change)
+        if validator_obj:
+            return validator_obj
         validator_obj = await Node.init(db=self.db, schema=self.validator_kind)
         await validator_obj.new(
             db=self.db,
infrahub/core/ipam/utilization.py CHANGED
@@ -35,7 +35,12 @@ class PrefixUtilizationGetter:
 
     async def _run_and_parse_query(self) -> None:
         self._results_by_prefix_id = {}
-        query = await IPPrefixUtilization.init(db=self.db, at=self.at, ip_prefixes=self.ip_prefixes)
+        query = await IPPrefixUtilization.init(
+            db=self.db,
+            at=self.at,
+            ip_prefixes=self.ip_prefixes,
+            allocated_kinds=[InfrahubKind.IPPREFIX, InfrahubKind.IPADDRESS],
+        )
         await query.execute(db=self.db)
 
         for result in query.get_results():
infrahub/core/manager.py CHANGED
@@ -26,6 +26,7 @@ from infrahub.core.relationship import Relationship, RelationshipManager
 from infrahub.core.schema import GenericSchema, MainSchemaTypes, NodeSchema, ProfileSchema, RelationshipSchema
 from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import NodeNotFoundError, ProcessingError, SchemaNotFoundError
+from infrahub.graphql.models import OrderModel
 
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
@@ -141,6 +142,7 @@ class NodeManager:
         account=...,
         partial_match: bool = ...,
         branch_agnostic: bool = ...,
+        order: OrderModel | None = ...,
     ) -> list[Any]: ...
 
     @overload
@@ -161,6 +163,7 @@
         account=...,
         partial_match: bool = ...,
         branch_agnostic: bool = ...,
+        order: OrderModel | None = ...,
     ) -> list[SchemaProtocol]: ...
 
     @classmethod
@@ -180,6 +183,7 @@
         account=None,
         partial_match: bool = False,
         branch_agnostic: bool = False,
+        order: OrderModel | None = None,
     ) -> list[Any]:
         """Query one or multiple nodes of a given type based on filter arguments.
 
@@ -227,6 +231,7 @@
             at=at,
             partial_match=partial_match,
             branch_agnostic=branch_agnostic,
+            order=order,
         )
         await query.execute(db=db)
         node_ids = query.get_node_ids()
@@ -295,6 +300,7 @@
             at=at,
             partial_match=partial_match,
             branch_agnostic=branch_agnostic,
+            order=OrderModel(disable=True),
         )
         return await query.count(db=db)
 
@@ -657,6 +663,7 @@
             prefetch_relationships=prefetch_relationships,
             account=account,
             branch_agnostic=branch_agnostic,
+            order=OrderModel(disable=True),
         )
 
         if len(items) > 1:
@@ -820,6 +827,7 @@
             prefetch_relationships=prefetch_relationships,
             account=account,
             branch_agnostic=branch_agnostic,
+            order=OrderModel(disable=True),
         )
 
         if len(items) < 1:
infrahub/core/merge.py CHANGED
@@ -10,6 +10,7 @@ from infrahub.core.protocols import CoreRepository
 from infrahub.core.registry import registry
 from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import ValidationError
+from infrahub.log import get_logger
 
 from ..git.models import GitRepositoryMerge
 from ..workflows.catalogue import GIT_REPOSITORIES_MERGE
@@ -25,6 +26,8 @@ if TYPE_CHECKING:
     from infrahub.database import InfrahubDatabase
     from infrahub.services import InfrahubServices
 
+log = get_logger()
+
 
 class BranchMerger:
     def __init__(
@@ -174,9 +177,11 @@
         if self.source_branch.name == registry.default_branch:
             raise ValidationError(f"Unable to merge the branch '{self.source_branch.name}' into itself")
 
-        enriched_diff = await self.diff_coordinator.update_branch_diff(
+        log.debug("Updating diff for merge")
+        enriched_diff = await self.diff_coordinator.update_branch_diff_and_return(
             base_branch=self.destination_branch, diff_branch=self.source_branch
         )
+        log.debug("Diff updated for merge")
         conflict_map = enriched_diff.get_all_conflicts()
         errors: list[str] = []
         for conflict_path, conflict in conflict_map.items():
infrahub/core/migrations/graph/__init__.py CHANGED
@@ -19,6 +19,7 @@ from .m014_remove_index_attr_value import Migration014
 from .m015_diff_format_update import Migration015
 from .m016_diff_delete_bug_fix import Migration016
 from .m017_add_core_profile import Migration017
+from .m018_uniqueness_nulls import Migration018
 
 if TYPE_CHECKING:
     from infrahub.core.root import Root
@@ -43,6 +44,7 @@ MIGRATIONS: list[type[Union[GraphMigration, InternalSchemaMigration, ArbitraryMigration]]] = [
     Migration015,
     Migration016,
     Migration017,
+    Migration018,
 ]
 
 
infrahub/core/migrations/graph/m014_remove_index_attr_value.py CHANGED
@@ -3,7 +3,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Sequence
 
 from infrahub.core.migrations.shared import MigrationResult
-from infrahub.core.query import Query  # noqa: TCH001
+from infrahub.core.query import Query  # noqa: TC001
 from infrahub.database import DatabaseType
 from infrahub.database.constants import IndexType
 from infrahub.database.index import IndexItem
infrahub/core/migrations/graph/m015_diff_format_update.py CHANGED
@@ -31,6 +31,6 @@ class Migration015(ArbitraryMigration):
         component_registry = get_component_registry()
         diff_repo = await component_registry.get_component(DiffRepository, db=db, branch=default_branch)
 
-        diff_roots = await diff_repo.get_empty_roots()
+        diff_roots = await diff_repo.get_roots_metadata()
         await diff_repo.delete_diff_roots(diff_root_uuids=[d.uuid for d in diff_roots])
         return MigrationResult()
infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py CHANGED
@@ -31,6 +31,6 @@ class Migration016(ArbitraryMigration):
         component_registry = get_component_registry()
         diff_repo = await component_registry.get_component(DiffRepository, db=db, branch=default_branch)
 
-        diff_roots = await diff_repo.get_empty_roots()
+        diff_roots = await diff_repo.get_roots_metadata()
         await diff_repo.delete_diff_roots(diff_root_uuids=[d.uuid for d in diff_roots])
         return MigrationResult()
infrahub/core/migrations/graph/m018_uniqueness_nulls.py ADDED
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import TYPE_CHECKING, Sequence
+
+from infrahub.core import registry
+from infrahub.core.diff.payload_builder import get_display_labels_per_kind
+from infrahub.core.migrations.shared import MigrationResult
+from infrahub.core.schema import GenericSchema, NodeSchema, SchemaRoot, internal_schema
+from infrahub.core.schema.manager import SchemaManager
+from infrahub.core.validators.uniqueness.checker import UniquenessChecker
+from infrahub.dependencies.registry import build_component_registry, get_component_registry
+from infrahub.log import get_logger
+
+from ..shared import InternalSchemaMigration, SchemaMigration
+
+if TYPE_CHECKING:
+    from infrahub.core.validators.uniqueness.model import NonUniqueNode
+    from infrahub.database import InfrahubDatabase
+
+log = get_logger()
+
+
+class Migration018(InternalSchemaMigration):
+    name: str = "018_validate_nulls_in_uniqueness_constraints"
+    minimum_version: int = 17
+    migrations: Sequence[SchemaMigration] = []
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:
+        result = MigrationResult()
+
+        return result
+
+    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+        """
+        Validate any schema that include optional attributes in the uniqueness constraints
+
+        An update to uniqueness constraint validation now handles NULL values as unique instead of ignoring them
+        """
+        default_branch = registry.get_branch_from_registry()
+        build_component_registry()
+        component_registry = get_component_registry()
+        uniqueness_checker = await component_registry.get_component(UniquenessChecker, db=db, branch=default_branch)
+        non_unique_nodes_by_kind: dict[str, list[NonUniqueNode]] = defaultdict(list)
+
+        manager = SchemaManager()
+        registry.schema = manager
+        internal_schema_root = SchemaRoot(**internal_schema)
+        manager.register_schema(schema=internal_schema_root)
+        schema_branch = await manager.load_schema_from_db(db=db, branch=default_branch)
+        manager.set_schema_branch(name=default_branch.name, schema=schema_branch)
+
+        for schema_kind in schema_branch.node_names + schema_branch.generic_names:
+            schema = schema_branch.get(name=schema_kind, duplicate=False)
+            if not isinstance(schema, (NodeSchema, GenericSchema)):
+                continue
+
+            schema_constraint_path_groups = schema.get_unique_constraint_schema_attribute_paths(
+                schema_branch=schema_branch
+            )
+            includes_optional_attr: bool = False
+
+            for constraint_group in schema_constraint_path_groups:
+                for schema_attribute_path in constraint_group:
+                    if (
+                        schema_attribute_path.attribute_schema
+                        and schema_attribute_path.attribute_schema.optional is True
+                    ):
+                        includes_optional_attr = True
+                        break
+
+            if not includes_optional_attr:
+                continue
+
+            non_unique_nodes = await uniqueness_checker.check_one_schema(schema=schema)
+            if non_unique_nodes:
+                non_unique_nodes_by_kind[schema_kind] = non_unique_nodes
+
+        if not non_unique_nodes_by_kind:
+            return MigrationResult()
+
+        error_strings = []
+        for schema_kind, non_unique_nodes in non_unique_nodes_by_kind.items():
+            display_label_map = await get_display_labels_per_kind(
+                db=db, kind=schema_kind, branch_name=default_branch.name, ids=[nun.node_id for nun in non_unique_nodes]
+            )
+            for non_unique_node in non_unique_nodes:
+                display_label = display_label_map.get(non_unique_node.node_id)
+                error_str = f"{display_label or ''}({non_unique_node.node_schema.kind} / {non_unique_node.node_id})"
+                error_str += " violates uniqueness constraints for the following attributes: "
+                attr_values = [
+                    f"{attr.attribute_name}={attr.attribute_value}" for attr in non_unique_node.non_unique_attributes
+                ]
+                error_str += ", ".join(attr_values)
+                error_strings.append(error_str)
+        if error_strings:
+            error_str = "For the following nodes, you must update the uniqueness_constraints on the schema of the node"
+            error_str += " to remove the attribute(s) with NULL values or update the data on the nodes to be unique"
+            error_str += " now that NULL values are considered during uniqueness validation"
+            return MigrationResult(errors=[error_str] + error_strings)
+        return MigrationResult()
infrahub/core/migrations/query/attribute_add.py CHANGED
@@ -56,6 +56,10 @@ class AttributeAddQuery(Query):
         self.params["is_visible_default"] = True
 
         query = """
+        MERGE (av:AttributeValue { value: $attr_value, is_default: true })
+        MERGE (is_protected_value:Boolean { value: $is_protected_default })
+        MERGE (is_visible_value:Boolean { value: $is_visible_default })
+        WITH av, is_protected_value, is_visible_value
         MATCH p = (n:%(node_kind)s)
         CALL {
             WITH n
@@ -66,12 +70,8 @@
             ORDER BY r2.branch_level DESC, r2.from ASC, r1.branch_level DESC, r1.from ASC
             LIMIT 1
         }
-        WITH n1 as n, r11 as r1, r12 as r2
+        WITH n1 as n, r11 as r1, r12 as r2, av, is_protected_value, is_visible_value
         WHERE r1.status = "active" AND (r2 IS NULL OR r2.status = "deleted")
-        MERGE (av:AttributeValue { value: $attr_value, is_default: true })
-        MERGE (is_protected_value:Boolean { value: $is_protected_default })
-        MERGE (is_visible_value:Boolean { value: $is_visible_default })
-        WITH n, av, is_protected_value, is_visible_value, r2
         CREATE (a:Attribute { name: $attr_name, branch_support: $branch_support })
         CREATE (n)-[:HAS_ATTRIBUTE $rel_props ]->(a)
         CREATE (a)-[:HAS_VALUE $rel_props ]->(av)
infrahub/core/migrations/schema/tasks.py CHANGED
@@ -7,9 +7,9 @@ from prefect import flow, task
 from prefect.cache_policies import NONE
 from prefect.logging import get_run_logger
 
-from infrahub.core.branch import Branch  # noqa: TCH001
+from infrahub.core.branch import Branch  # noqa: TC001
 from infrahub.core.migrations import MIGRATION_MAP
-from infrahub.core.path import SchemaPath  # noqa: TCH001
+from infrahub.core.path import SchemaPath  # noqa: TC001
 from infrahub.services import services
 from infrahub.workflows.utils import add_branch_tag
 
infrahub/core/migrations/shared.py CHANGED
@@ -6,8 +6,8 @@ from pydantic import BaseModel, ConfigDict, Field
 from typing_extensions import Self
 
 from infrahub.core import registry
-from infrahub.core.path import SchemaPath  # noqa: TCH001
-from infrahub.core.query import Query  # noqa: TCH001
+from infrahub.core.path import SchemaPath  # noqa: TC001
+from infrahub.core.query import Query  # noqa: TC001
 from infrahub.core.schema import (
     AttributeSchema,
     GenericSchema,
@@ -17,7 +17,7 @@ from infrahub.core.schema import (
     internal_schema,
 )
 
-from .query import MigrationQuery  # noqa: TCH001
+from .query import MigrationQuery  # noqa: TC001
 
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
infrahub/core/node/__init__.py CHANGED
@@ -18,6 +18,7 @@ from infrahub.support.macro import MacroDefinition
 from infrahub.types import ATTRIBUTE_TYPES
 
 from ...graphql.constants import KIND_GRAPHQL_FIELD_NAME
+from ...graphql.models import OrderModel
 from ..relationship import RelationshipManager
 from ..utils import update_relationships_to
 from .base import BaseNode, BaseNodeMeta, BaseNodeOptions
@@ -399,7 +400,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         peer = await relationship_attribute.get_peer(db=db, raise_on_error=True)
 
         related_node = await registry.manager.get_one_by_id_or_default_filter(
-            db=db, id=peer.id, kind=attribute_path.active_relationship_schema.peer
+            db=db, id=peer.id, kind=attribute_path.active_relationship_schema.peer, branch=self._branch.name
         )
 
         attribute: BaseAttribute = getattr(
@@ -609,7 +610,12 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
 
         # Update the relationship to the branch itself
         query = await NodeGetListQuery.init(
-            db=db, schema=self._schema, filters={"id": self.id}, branch=self._branch, at=delete_at
+            db=db,
+            schema=self._schema,
+            filters={"id": self.id},
+            branch=self._branch,
+            at=delete_at,
+            order=OrderModel(disable=True),
         )
         await query.execute(db=db)
         result = query.get_result()
infrahub/core/node/constraints/grouped_uniqueness.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Iterable, Optional
 
 from infrahub.core import registry
+from infrahub.core.constants import NULL_VALUE
 from infrahub.core.schema import (
     MainSchemaTypes,
     SchemaAttributePath,
@@ -61,10 +62,12 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
                 include_in_query = True
             attribute_name = attribute_path.attribute_schema.name
             attribute = getattr(updated_node, attribute_name)
-            if attribute.is_enum:
+            if attribute.is_enum and attribute.value:
                 attribute_value = attribute.value.value
             else:
                 attribute_value = attribute.value
+            if attribute_value is None:
+                attribute_value = NULL_VALUE
             query_attribute_paths.add(
                 QueryAttributePath(
                     attribute_name=attribute_name,
@@ -96,10 +99,14 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
             attribute_name = schema_attribute_path.attribute_schema.name
             attribute_field = getattr(updated_node, attribute_name)
             attribute_value = getattr(attribute_field, schema_attribute_path.attribute_property_name or "value")
+            if attribute_field.is_enum and attribute_value:
+                attribute_value = attribute_value.value
+            elif attribute_value is None:
+                attribute_value = NULL_VALUE
             node_value_combination.append(
                 SchemaAttributePathValue.from_schema_attribute_path(
                     schema_attribute_path,
-                    value=attribute_value.value if attribute_field.is_enum else attribute_value,
+                    value=attribute_value,
                 )
             )
         return node_value_combination
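The value normalization above is what makes NULLs participate in uniqueness checks: instead of skipping an unset optional attribute, the constraint substitutes the NULL_VALUE sentinel, so two nodes that are otherwise identical and both unset now collide (which is also what Migration018 warns about). A standalone sketch of that rule follows; the sentinel literal and function name are illustrative, the real constant lives in infrahub.core.constants:

NULL_SENTINEL = "__null__"  # stand-in for infrahub.core.constants.NULL_VALUE

def normalize_for_uniqueness(value: object, is_enum: bool) -> object:
    # Enum members are compared by their underlying value.
    if is_enum and value is not None:
        return value.value  # type: ignore[attr-defined]
    # None becomes a concrete sentinel, so two NULLs are treated as equal
    # values by the uniqueness query instead of being ignored.
    if value is None:
        return NULL_SENTINEL
    return value

# Two unset optional attributes now normalize to the same value and can conflict.
assert normalize_for_uniqueness(None, False) == normalize_for_uniqueness(None, False)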
infrahub/core/query/__init__.py CHANGED
@@ -10,6 +10,7 @@ import ujson
 from neo4j.graph import Node as Neo4jNode
 from neo4j.graph import Path as Neo4jPath
 from neo4j.graph import Relationship as Neo4jRelationship
+from opentelemetry import trace
 
 from infrahub import config
 from infrahub.core.constants import PermissionLevel
@@ -161,7 +162,7 @@ def cleanup_return_labels(labels: list[str]) -> list[str]:
 class QueryResult:
     def __init__(self, data: list[Union[Neo4jNode, Neo4jRelationship, list[Neo4jNode]]], labels: list[str]):
         self.data = data
-        self.labels = cleanup_return_labels(labels)
+        self.labels = labels
         self.branch_score: int = 0
         self.time_score: int = 0
         self.permission_score = PermissionLevel.DEFAULT
@@ -523,6 +524,7 @@ class Query(ABC):
 
         return ":params { " + ", ".join(params) + " }"
 
+    @trace.get_tracer(__name__).start_as_current_span("Query.execute")
    async def execute(self, db: InfrahubDatabase) -> Self:
        # Ensure all mandatory params have been provided
        # Ensure at least 1 return obj has been defined
@@ -552,7 +554,8 @@
        if not results and self.raise_error_if_empty:
            raise QueryError(query=query_str, params=self.params)
 
-        self.results = [QueryResult(data=result, labels=self.return_labels) for result in results]
+        clean_labels = cleanup_return_labels(self.return_labels)
+        self.results = [QueryResult(data=result, labels=clean_labels) for result in results]
         self.has_been_executed = True
 
         return self
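The decorator added to Query.execute is standard OpenTelemetry API usage: Tracer.start_as_current_span returns a span context manager that can also be applied as a decorator. A small self-contained sketch of the same pattern, independent of Infrahub internals (the span and function names here are illustrative):

from opentelemetry import trace

tracer = trace.get_tracer(__name__)

@tracer.start_as_current_span("do_work")
def do_work(x: int) -> int:
    # Runs inside the "do_work" span; without an SDK configured this is a no-op tracer.
    return x * 2

do_work(21)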