infrahub-server 1.2.9rc0__py3-none-any.whl → 1.2.11__py3-none-any.whl

This diff represents the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (79)
  1. infrahub/computed_attribute/models.py +13 -0
  2. infrahub/computed_attribute/tasks.py +48 -26
  3. infrahub/config.py +9 -0
  4. infrahub/core/attribute.py +43 -2
  5. infrahub/core/branch/models.py +8 -9
  6. infrahub/core/branch/tasks.py +0 -2
  7. infrahub/core/constants/infrahubkind.py +1 -0
  8. infrahub/core/constraint/node/runner.py +1 -1
  9. infrahub/core/diff/calculator.py +65 -11
  10. infrahub/core/diff/combiner.py +38 -31
  11. infrahub/core/diff/coordinator.py +44 -28
  12. infrahub/core/diff/data_check_synchronizer.py +3 -2
  13. infrahub/core/diff/enricher/hierarchy.py +36 -27
  14. infrahub/core/diff/ipam_diff_parser.py +5 -4
  15. infrahub/core/diff/merger/merger.py +46 -16
  16. infrahub/core/diff/merger/serializer.py +1 -0
  17. infrahub/core/diff/model/field_specifiers_map.py +64 -0
  18. infrahub/core/diff/model/path.py +58 -58
  19. infrahub/core/diff/parent_node_adder.py +14 -16
  20. infrahub/core/diff/query/drop_nodes.py +42 -0
  21. infrahub/core/diff/query/field_specifiers.py +8 -7
  22. infrahub/core/diff/query/filters.py +15 -1
  23. infrahub/core/diff/query/merge.py +264 -28
  24. infrahub/core/diff/query/save.py +6 -2
  25. infrahub/core/diff/query_parser.py +55 -65
  26. infrahub/core/diff/repository/deserializer.py +38 -24
  27. infrahub/core/diff/repository/repository.py +31 -12
  28. infrahub/core/diff/tasks.py +3 -3
  29. infrahub/core/graph/__init__.py +1 -1
  30. infrahub/core/migrations/graph/__init__.py +2 -0
  31. infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
  32. infrahub/core/migrations/graph/m028_delete_diffs.py +38 -0
  33. infrahub/core/node/resource_manager/ip_address_pool.py +6 -2
  34. infrahub/core/node/resource_manager/ip_prefix_pool.py +6 -2
  35. infrahub/core/protocols.py +4 -0
  36. infrahub/core/query/branch.py +27 -17
  37. infrahub/core/query/diff.py +169 -51
  38. infrahub/core/query/node.py +39 -5
  39. infrahub/core/query/relationship.py +105 -30
  40. infrahub/core/query/subquery.py +2 -2
  41. infrahub/core/relationship/model.py +1 -1
  42. infrahub/core/schema/definitions/core/__init__.py +8 -1
  43. infrahub/core/schema/definitions/core/resource_pool.py +20 -0
  44. infrahub/core/schema/schema_branch.py +3 -0
  45. infrahub/core/validators/tasks.py +1 -1
  46. infrahub/core/validators/uniqueness/query.py +7 -0
  47. infrahub/database/__init__.py +5 -4
  48. infrahub/graphql/app.py +1 -1
  49. infrahub/graphql/loaders/node.py +1 -1
  50. infrahub/graphql/loaders/peers.py +1 -1
  51. infrahub/graphql/mutations/proposed_change.py +1 -1
  52. infrahub/graphql/queries/diff/tree.py +2 -1
  53. infrahub/graphql/queries/relationship.py +1 -1
  54. infrahub/graphql/queries/task.py +10 -0
  55. infrahub/graphql/resolvers/many_relationship.py +4 -4
  56. infrahub/graphql/resolvers/resolver.py +4 -4
  57. infrahub/graphql/resolvers/single_relationship.py +2 -2
  58. infrahub/graphql/subscription/graphql_query.py +2 -2
  59. infrahub/graphql/types/branch.py +1 -1
  60. infrahub/graphql/types/task_log.py +3 -2
  61. infrahub/message_bus/operations/refresh/registry.py +1 -1
  62. infrahub/task_manager/task.py +44 -4
  63. infrahub/telemetry/database.py +1 -1
  64. infrahub/telemetry/tasks.py +1 -1
  65. infrahub/trigger/models.py +11 -1
  66. infrahub/trigger/setup.py +51 -15
  67. infrahub/trigger/tasks.py +1 -4
  68. infrahub/types.py +1 -1
  69. infrahub/webhook/models.py +2 -1
  70. infrahub/workflows/catalogue.py +9 -0
  71. infrahub/workflows/initialization.py +1 -3
  72. infrahub_sdk/timestamp.py +2 -2
  73. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/METADATA +3 -3
  74. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/RECORD +79 -75
  75. infrahub_testcontainers/docker-compose.test.yml +3 -3
  76. infrahub_testcontainers/performance_test.py +6 -3
  77. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/LICENSE.txt +0 -0
  78. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/WHEEL +0 -0
  79. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/entry_points.txt +0 -0

infrahub/core/diff/coordinator.py

@@ -9,6 +9,7 @@ from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import ValidationError
 from infrahub.log import get_logger
 
+from .model.field_specifiers_map import NodeFieldSpecifierMap
 from .model.path import (
     BranchTrackingId,
     EnrichedDiffRoot,
@@ -16,6 +17,7 @@ from .model.path import (
     EnrichedDiffs,
     EnrichedDiffsMetadata,
     NameTrackingId,
+    NodeIdentifier,
     TrackingId,
 )
 
@@ -43,7 +45,7 @@ class EnrichedDiffRequest:
     from_time: Timestamp
     to_time: Timestamp
     tracking_id: TrackingId
-    node_field_specifiers: dict[str, set[str]] = field(default_factory=dict)
+    node_field_specifiers: NodeFieldSpecifierMap = field(default_factory=NodeFieldSpecifierMap)
 
     def __repr__(self) -> str:
         return (
@@ -141,7 +143,7 @@ class DiffCoordinator:
             self.lock_registry.get(name=incremental_lock_name, namespace=self.lock_namespace),
         ):
             log.info(f"Acquired lock to run branch diff update for {base_branch.name} - {diff_branch.name}")
-            enriched_diffs = await self._update_diffs(
+            enriched_diffs, node_identifiers_to_drop = await self._update_diffs(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
                 from_time=from_time,
@@ -149,7 +151,9 @@ class DiffCoordinator:
                 tracking_id=tracking_id,
                 force_branch_refresh=False,
             )
-            await self.diff_repo.save(enriched_diffs=enriched_diffs)
+            await self.diff_repo.save(
+                enriched_diffs=enriched_diffs, node_identifiers_to_drop=list(node_identifiers_to_drop)
+            )
             await self._update_core_data_checks(enriched_diff=enriched_diffs.diff_branch_diff)
             log.info(f"Branch diff update complete for {base_branch.name} - {diff_branch.name}")
             return enriched_diffs.diff_branch_diff
@@ -168,7 +172,7 @@ class DiffCoordinator:
         )
         async with self.lock_registry.get(name=general_lock_name, namespace=self.lock_namespace):
             log.info(f"Acquired lock to run arbitrary diff update for {base_branch.name} - {diff_branch.name}")
-            enriched_diffs = await self._update_diffs(
+            enriched_diffs, node_identifiers_to_drop = await self._update_diffs(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
                 from_time=from_time,
@@ -177,7 +181,9 @@ class DiffCoordinator:
                 force_branch_refresh=False,
             )
 
-            await self.diff_repo.save(enriched_diffs=enriched_diffs)
+            await self.diff_repo.save(
+                enriched_diffs=enriched_diffs, node_identifiers_to_drop=list(node_identifiers_to_drop)
+            )
             await self._update_core_data_checks(enriched_diff=enriched_diffs.diff_branch_diff)
             log.info(f"Arbitrary diff update complete for {base_branch.name} - {diff_branch.name}")
             return enriched_diffs.diff_branch_diff
@@ -205,7 +211,7 @@ class DiffCoordinator:
         from_time = current_branch_diff.from_time
         branched_from_time = Timestamp(diff_branch.get_branched_from())
         from_time = max(from_time, branched_from_time)
-        enriched_diffs = await self._update_diffs(
+        enriched_diffs, _ = await self._update_diffs(
             base_branch=base_branch,
             diff_branch=diff_branch,
             from_time=branched_from_time,
@@ -282,7 +288,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: Literal[True] = ...,
-    ) -> EnrichedDiffs: ...
+    ) -> tuple[EnrichedDiffs, set[NodeIdentifier]]: ...
 
     @overload
     async def _update_diffs(
@@ -293,7 +299,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: Literal[False] = ...,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata: ...
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]: ...
 
     async def _update_diffs(
         self,
@@ -303,7 +309,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: bool = False,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]:
         # start with empty diffs b/c we only care about their metadata for now, hydrate them with data as needed
         diff_pairs_metadata = await self.diff_repo.get_diff_pairs_metadata(
             base_branch_names=[base_branch.name],
@@ -312,7 +318,7 @@ class DiffCoordinator:
             to_time=to_time,
             tracking_id=tracking_id,
         )
-        aggregated_enriched_diffs = await self._aggregate_enriched_diffs(
+        aggregated_enriched_diffs, node_identifiers_to_drop = await self._aggregate_enriched_diffs(
             diff_request=EnrichedDiffRequest(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
@@ -343,7 +349,7 @@ class DiffCoordinator:
         # this is an EnrichedDiffsMetadata, so there are no nodes to enrich
         if not isinstance(aggregated_enriched_diffs, EnrichedDiffs):
             aggregated_enriched_diffs.update_metadata(from_time=from_time, to_time=to_time, tracking_id=tracking_id)
-            return aggregated_enriched_diffs
+            return aggregated_enriched_diffs, set()
 
         await self.conflicts_enricher.add_conflicts_to_branch_diff(
             base_diff_root=aggregated_enriched_diffs.base_branch_diff,
@@ -353,27 +359,27 @@ class DiffCoordinator:
             enriched_diff_root=aggregated_enriched_diffs.diff_branch_diff, conflicts_only=True
         )
 
-        return aggregated_enriched_diffs
+        return aggregated_enriched_diffs, node_identifiers_to_drop
 
     @overload
     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: list[EnrichedDiffsMetadata],
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata: ...
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]: ...
 
     @overload
     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: None,
-    ) -> EnrichedDiffs: ...
+    ) -> tuple[EnrichedDiffs, set[NodeIdentifier]]: ...
 
     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: list[EnrichedDiffsMetadata] | None,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]:
         """
         If return is an EnrichedDiffsMetadata, it acts as a pointer to a diff in the database that has all the
         necessary data for this diff_request. Might have a different time range and/or tracking_id
@@ -385,6 +391,7 @@ class DiffCoordinator:
             diff_request=diff_request, is_incremental_diff=False
         )
 
+        node_identifiers_to_drop: set[NodeIdentifier] = set()
         if partial_enriched_diffs is not None and not aggregated_enriched_diffs:
             ordered_diffs = self._get_ordered_diff_pairs(diff_pairs=partial_enriched_diffs, allow_overlap=False)
             ordered_diff_reprs = [repr(d) for d in ordered_diffs]
@@ -430,31 +437,31 @@ class DiffCoordinator:
                 )
                 current_time = end_time
 
-            aggregated_enriched_diffs = await self._concatenate_diffs_and_requests(
+            aggregated_enriched_diffs, node_identifiers_to_drop = await self._concatenate_diffs_and_requests(
                 diff_or_request_list=incremental_diffs_and_requests, full_diff_request=diff_request
             )
 
         # no changes during this time period, so generate an EnrichedDiffs with no nodes
         if not aggregated_enriched_diffs:
-            return self._build_enriched_diffs_with_no_nodes(diff_request=diff_request)
+            return self._build_enriched_diffs_with_no_nodes(diff_request=diff_request), node_identifiers_to_drop
 
         # metadata-only diff, means that a diff exists in the database that covers at least
         # part of this time period, but it might need to have its start or end time extended
         # to cover time ranges with no changes
         if not isinstance(aggregated_enriched_diffs, EnrichedDiffs):
-            return aggregated_enriched_diffs
+            return aggregated_enriched_diffs, node_identifiers_to_drop
 
         # a new diff (with nodes) covering the time period
         aggregated_enriched_diffs.update_metadata(
             from_time=diff_request.from_time, to_time=diff_request.to_time, tracking_id=diff_request.tracking_id
         )
-        return aggregated_enriched_diffs
+        return aggregated_enriched_diffs, node_identifiers_to_drop
 
     async def _concatenate_diffs_and_requests(
         self,
         diff_or_request_list: Sequence[EnrichedDiffsMetadata | EnrichedDiffRequest | None],
         full_diff_request: EnrichedDiffRequest,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata | None:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata | None, set[NodeIdentifier]]:
         """
         Returns None if diff_or_request_list is empty or all Nones
         meaning there are no changes for the diff during this time period
@@ -464,7 +471,7 @@ class DiffCoordinator:
         meaning multiple diffs (some that may have been freshly calculated) were combined
         """
         previous_diff_pair: EnrichedDiffs | EnrichedDiffsMetadata | None = None
-        updated_node_uuids: set[str] = set()
+        updated_node_identifiers: set[NodeIdentifier] = set()
         for diff_or_request in diff_or_request_list:
             if isinstance(diff_or_request, EnrichedDiffRequest):
                 if previous_diff_pair:
@@ -478,8 +485,8 @@ class DiffCoordinator:
                 calculated_diff = await self._calculate_enriched_diff(
                     diff_request=diff_or_request, is_incremental_diff=is_incremental_diff
                 )
-                updated_node_uuids |= calculated_diff.base_node_uuids
-                updated_node_uuids |= calculated_diff.branch_node_uuids
+                updated_node_identifiers |= calculated_diff.base_node_identifiers
+                updated_node_identifiers |= calculated_diff.branch_node_identifiers
                 single_enriched_diffs: EnrichedDiffs | EnrichedDiffsMetadata = calculated_diff
 
             elif isinstance(diff_or_request, EnrichedDiffsMetadata):
@@ -495,17 +502,22 @@ class DiffCoordinator:
             previous_diff_pair = await self._combine_diffs(
                 earlier=previous_diff_pair,
                 later=single_enriched_diffs,
-                node_uuids=updated_node_uuids,
+                node_identifiers=updated_node_identifiers,
             )
             log.info("Diffs combined.")
 
-        return previous_diff_pair
+        node_identifiers_to_drop: set[NodeIdentifier] = set()
+        if isinstance(previous_diff_pair, EnrichedDiffs):
+            # nodes that were updated and that no longer exist on this diff have been removed
+            node_identifiers_to_drop = updated_node_identifiers - previous_diff_pair.branch_node_identifiers
+
+        return previous_diff_pair, node_identifiers_to_drop
 
     async def _combine_diffs(
         self,
         earlier: EnrichedDiffs | EnrichedDiffsMetadata,
         later: EnrichedDiffs | EnrichedDiffsMetadata,
-        node_uuids: set[str],
+        node_identifiers: set[NodeIdentifier],
     ) -> EnrichedDiffs | EnrichedDiffsMetadata:
         log.info(f"Earlier diff to combine: {earlier!r}")
         log.info(f"Later diff to combine: {later!r}")
@@ -522,11 +534,15 @@ class DiffCoordinator:
         # hydrate the diffs to combine, if necessary
         if not isinstance(earlier, EnrichedDiffs):
             log.info("Hydrating earlier diff...")
-            earlier = await self.diff_repo.hydrate_diff_pair(enriched_diffs_metadata=earlier, node_uuids=node_uuids)
+            earlier = await self.diff_repo.hydrate_diff_pair(
+                enriched_diffs_metadata=earlier, node_identifiers=node_identifiers
+            )
             log.info("Earlier diff hydrated.")
         if not isinstance(later, EnrichedDiffs):
             log.info("Hydrating later diff...")
-            later = await self.diff_repo.hydrate_diff_pair(enriched_diffs_metadata=later, node_uuids=node_uuids)
+            later = await self.diff_repo.hydrate_diff_pair(
+                enriched_diffs_metadata=later, node_identifiers=node_identifiers
+            )
             log.info("Later diff hydrated.")
 
         return await self.diff_combiner.combine(earlier_diffs=earlier, later_diffs=later)
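
Note on the coordinator changes above: _update_diffs, _aggregate_enriched_diffs, and _concatenate_diffs_and_requests now return a tuple that also carries a set of NodeIdentifier values, so that nodes which were touched during recalculation but no longer appear on the combined diff can be dropped when the diff is saved via diff_repo.save(..., node_identifiers_to_drop=...). A minimal sketch of that bookkeeping follows; the NodeIdentifier below is a hypothetical stand-in for the class imported from .model.path (not shown in this diff), assumed to be a hashable value object with uuid, kind, and db_id fields as its usage here suggests.

    # Sketch only: NodeIdentifier is a stand-in for infrahub.core.diff.model.path.NodeIdentifier.
    from dataclasses import dataclass


    @dataclass(frozen=True)
    class NodeIdentifier:
        uuid: str
        kind: str
        db_id: str


    # identifiers touched while recalculating the incremental diffs
    updated_node_identifiers = {
        NodeIdentifier(uuid="aaa", kind="TestPerson", db_id="100"),
        NodeIdentifier(uuid="bbb", kind="TestCar", db_id="200"),
    }
    # identifiers still present on the combined branch diff
    branch_node_identifiers = {NodeIdentifier(uuid="aaa", kind="TestPerson", db_id="100")}

    # updated nodes that no longer exist on the combined diff are dropped at save time
    node_identifiers_to_drop = updated_node_identifiers - branch_node_identifiers
    print(node_identifiers_to_drop)
    # {NodeIdentifier(uuid='bbb', kind='TestCar', db_id='200')}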

infrahub/core/diff/data_check_synchronizer.py

@@ -1,6 +1,7 @@
 from enum import Enum
 
 from infrahub.core.constants import BranchConflictKeep, InfrahubKind
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters
 from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
 from infrahub.core.manager import NodeManager
 from infrahub.core.node import Node
@@ -74,7 +75,7 @@ class DiffDataCheckSynchronizer:
         retrieved_diff_conflicts_only = await self.diff_repository.get_one(
             diff_branch_name=enriched_diff.diff_branch_name,
             diff_id=enriched_diff.uuid,
-            filters={"only_conflicted": True},
+            filters=EnrichedDiffQueryFilters(only_conflicted=True),
         )
         enriched_diff_all_conflicts = retrieved_diff_conflicts_only
         # if `enriched_diff` is an EnrichedDiffRootsMetadata, then there have been no changes to the diff and
@@ -116,7 +117,7 @@ class DiffDataCheckSynchronizer:
     def _update_diff_conflicts(self, updated_diff: EnrichedDiffRoot, retrieved_diff: EnrichedDiffRoot) -> None:
         for updated_node in updated_diff.nodes:
             try:
-                retrieved_node = retrieved_diff.get_node(node_uuid=updated_node.uuid)
+                retrieved_node = retrieved_diff.get_node(node_identifier=updated_node.identifier)
             except ValueError:
                 retrieved_node = None
             if not retrieved_node:

infrahub/core/diff/enricher/hierarchy.py

@@ -12,6 +12,7 @@ from infrahub.log import get_logger
 from ..model.path import (
     CalculatedDiffs,
     EnrichedDiffRoot,
+    NodeIdentifier,
 )
 from ..parent_node_adder import DiffParentNodeAdder, ParentNodeAddRequest
 from .interface import DiffEnricherInterface
@@ -37,8 +38,8 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
 
         log.info("Beginning hierarchical diff enrichment...")
         self.parent_adder.initialize(enriched_diff_root=enriched_diff_root)
-        node_rel_parent_map: dict[str, list[str]] = defaultdict(list)
-        node_hierarchy_map: dict[str, list[str]] = defaultdict(list)
+        node_rel_parent_map: dict[str, list[NodeIdentifier]] = defaultdict(list)
+        node_hierarchy_map: dict[str, list[NodeIdentifier]] = defaultdict(list)
 
         for node in enriched_diff_root.nodes:
             schema_node = self.db.schema.get(
@@ -49,14 +50,14 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
                 continue
 
             if schema_node.has_parent_relationship:
-                node_rel_parent_map[node.kind].append(node.uuid)
+                node_rel_parent_map[node.kind].append(node.identifier)
                 continue
 
             try:
                 hierarchy_schema = schema_node.get_hierarchy_schema(
                     db=self.db, branch=enriched_diff_root.diff_branch_name
                 )
-                node_hierarchy_map[hierarchy_schema.kind].append(node.uuid)
+                node_hierarchy_map[hierarchy_schema.kind].append(node.identifier)
             except ValueError:
                 pass
 
@@ -67,21 +68,21 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
     async def _enrich_hierarchical_nodes(
         self,
         enriched_diff_root: EnrichedDiffRoot,
-        node_map: dict[str, list[str]],
+        node_map: dict[str, list[NodeIdentifier]],
     ) -> None:
         diff_branch = registry.get_branch_from_registry(branch=enriched_diff_root.diff_branch_name)
 
         # Retrieve the ID of all ancestors
-        for kind, node_ids in node_map.items():
-            log.info(f"Beginning hierarchy enrichment for {kind} node, num_nodes={len(node_ids)}...")
+        for kind, node_identifiers in node_map.items():
+            log.info(f"Beginning hierarchy enrichment for {kind} node, num_nodes={len(node_identifiers)}...")
             hierarchy_schema = self.db.schema.get(
                 name=kind, branch=enriched_diff_root.diff_branch_name, duplicate=False
             )
-            for node_id in node_ids:
+            for node_identifier in node_identifiers:
                 query = await NodeGetHierarchyQuery.init(
                     db=self.db,
                     direction=RelationshipHierarchyDirection.ANCESTORS,
-                    node_id=node_id,
+                    node_id=node_identifier.uuid,
                     node_schema=hierarchy_schema,
                     branch=diff_branch,
                     hierarchical_ordering=True,
@@ -93,15 +94,15 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
                 if not ancestors:
                     continue
 
-                node = enriched_diff_root.get_node(node_uuid=node_id)
+                node = enriched_diff_root.get_node(node_identifier=node_identifier)
                 parent_rel = hierarchy_schema.get_relationship(name="parent")
 
                 current_node = node
                 for ancestor in ancestors:
+                    ancestor_identifier = NodeIdentifier(uuid=ancestor.uuid, kind=ancestor.kind, db_id=ancestor.db_id)
                     parent_request = ParentNodeAddRequest(
-                        node_id=current_node.uuid,
-                        parent_id=str(ancestor.uuid),
-                        parent_kind=ancestor.kind,
+                        node_identifier=current_node.identifier,
+                        parent_identifier=ancestor_identifier,
                         parent_label="",
                         parent_rel_name=parent_rel.name,
                         parent_rel_identifier=parent_rel.get_identifier(),
@@ -113,20 +114,20 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
                     current_node = parent
 
     async def _enrich_nodes_with_parent(
-        self, enriched_diff_root: EnrichedDiffRoot, node_map: dict[str, list[str]]
+        self, enriched_diff_root: EnrichedDiffRoot, node_map: dict[str, list[NodeIdentifier]]
     ) -> None:
         diff_branch = registry.get_branch_from_registry(branch=enriched_diff_root.diff_branch_name)
 
-        parent_peers: dict[str, RelationshipPeerData] = {}
+        parent_peers: dict[NodeIdentifier, RelationshipPeerData] = {}
 
         # Prepare a map to capture all parents that also have a parent
-        node_parent_with_parent_map: dict[str, list[str]] = defaultdict(list)
+        node_parent_with_parent_map: dict[str, list[NodeIdentifier]] = defaultdict(list)
 
         # TODO Not gonna implement it now but technically we could check the content of the node to see if the parent relationship is present
 
         # Query the UUID of the parent
-        for kind, ids in node_map.items():
-            log.info(f"Beginning parent enrichment for {kind} node, num_nodes={len(ids)}...")
+        for kind, node_identifiers in node_map.items():
+            log.info(f"Beginning parent enrichment for {kind} node, num_nodes={len(node_identifiers)}...")
            schema_node = self.db.schema.get(name=kind, branch=enriched_diff_root.diff_branch_name, duplicate=False)
 
            parent_rel = [rel for rel in schema_node.relationships if rel.kind == RelationshipKind.PARENT][0]
@@ -137,33 +138,41 @@ class DiffHierarchyEnricher(DiffEnricherInterface):
             query = await RelationshipGetPeerQuery.init(
                 db=self.db,
                 branch=diff_branch,
-                source_ids=ids,
+                source_ids=[ni.uuid for ni in node_identifiers],
                 rel_type=DatabaseEdgeType.IS_RELATED.value,
                 schema=parent_rel,
             )
             await query.execute(db=self.db)
 
             for peer in query.get_peers():
-                parent_peers[str(peer.source_id)] = peer
+                source_identifier = NodeIdentifier(
+                    uuid=str(peer.source_id), kind=peer.source_kind, db_id=peer.source_db_id
+                )
+                parent_peers[source_identifier] = peer
                 if parent_schema.has_parent_relationship:
-                    node_parent_with_parent_map[parent_schema.kind].append(str(peer.peer_id))
+                    peer_identifier = NodeIdentifier(uuid=str(peer.peer_id), kind=peer.peer_kind, db_id=peer.peer_db_id)
+                    node_parent_with_parent_map[parent_schema.kind].append(peer_identifier)
 
         # Check if the parent are already present
         # If parent is already in the list of node we need to add a relationship
        # If parent is not in the list of node, we need to add it
-        diff_node_map = enriched_diff_root.get_node_map(node_uuids=set(parent_peers.keys()))
-        for node_id, peer_parent in parent_peers.items():
+        diff_node_map = enriched_diff_root.get_node_map(
+            node_uuids={source_identifier.uuid for source_identifier in parent_peers.keys()}
+        )
+        for node_identifier, peer_parent in parent_peers.items():
             # TODO check if we can optimize this part to avoid querying this multiple times
-            node = diff_node_map[node_id]
+            node = diff_node_map[node_identifier]
             schema_node = self.db.schema.get(
                 name=node.kind, branch=enriched_diff_root.diff_branch_name, duplicate=False
             )
             parent_rel = [rel for rel in schema_node.relationships if rel.kind == RelationshipKind.PARENT][0]
 
+            peer_identifier = NodeIdentifier(
+                uuid=str(peer_parent.peer_id), kind=peer_parent.peer_kind, db_id=peer_parent.peer_db_id
+            )
             parent_request = ParentNodeAddRequest(
-                node_id=node.uuid,
-                parent_id=str(peer_parent.peer_id),
-                parent_kind=peer_parent.peer_kind,
+                node_identifier=node.identifier,
+                parent_identifier=peer_identifier,
                 parent_label="",
                 parent_rel_name=parent_rel.name,
                 parent_rel_identifier=parent_rel.get_identifier(),
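
In the hierarchy enricher above, the kind-to-node maps and parent_peers are now keyed by full NodeIdentifier values rather than bare UUID strings, so the kind and db_id stay available when building ParentNodeAddRequest objects. A small sketch of the grouping pattern, reusing the hypothetical NodeIdentifier dataclass from the earlier example (kind names are placeholders):

    # Grouping sketch only; reuses the hypothetical NodeIdentifier dataclass defined earlier.
    from collections import defaultdict

    node_rel_parent_map: dict[str, list[NodeIdentifier]] = defaultdict(list)
    for identifier in (
        NodeIdentifier(uuid="aaa", kind="InfraDevice", db_id="1"),
        NodeIdentifier(uuid="bbb", kind="InfraDevice", db_id="2"),
    ):
        node_rel_parent_map[identifier.kind].append(identifier)

    # downstream queries still take plain UUIDs, e.g. source_ids=[ni.uuid for ni in node_identifiers]
    print([ni.uuid for ni in node_rel_parent_map["InfraDevice"]])
    # ['aaa', 'bbb']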

infrahub/core/diff/ipam_diff_parser.py

@@ -3,6 +3,7 @@ from dataclasses import dataclass
 from infrahub.core.constants import DiffAction
 from infrahub.core.constants.database import DatabaseEdgeType
 from infrahub.core.diff.model.path import BranchTrackingId
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters, IncExclActionFilterOptions, IncExclFilterOptions
 from infrahub.core.ipam.kinds_getter import IpamKindsGetter
 from infrahub.core.ipam.model import IpamNodeDetails
 from infrahub.core.manager import NodeManager
@@ -48,10 +49,10 @@ class IpamDiffParser:
             base_branch_name=target_branch_name,
             diff_branch_names=[source_branch_name],
             tracking_id=BranchTrackingId(name=source_branch_name),
-            filters={
-                "kind": {"includes": list(ip_address_kinds | ip_prefix_kinds)},
-                "status": {"excludes": {DiffAction.UNCHANGED}},
-            },
+            filters=EnrichedDiffQueryFilters(
+                kind=IncExclFilterOptions(includes=list(ip_address_kinds | ip_prefix_kinds)),
+                status=IncExclActionFilterOptions(excludes={DiffAction.UNCHANGED}),
+            ),
         )
         changed_node_details: list[ChangedIpamNodeDetails] = []
         for diff in enriched_diffs:
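
Both DiffDataCheckSynchronizer and IpamDiffParser now pass a typed EnrichedDiffQueryFilters object instead of a raw filter dict. The sketch below only illustrates the call shape taken from the hunks above; it assumes infrahub is importable, and the kind names are placeholders rather than the kinds computed at runtime.

    # Typed filters replacing the raw dicts; kind names here are placeholders.
    from infrahub.core.constants import DiffAction
    from infrahub.core.diff.query.filters import (
        EnrichedDiffQueryFilters,
        IncExclActionFilterOptions,
        IncExclFilterOptions,
    )

    # previously: filters={"kind": {"includes": [...]}, "status": {"excludes": {DiffAction.UNCHANGED}}}
    ipam_filters = EnrichedDiffQueryFilters(
        kind=IncExclFilterOptions(includes=["IpamIPAddress", "IpamIPPrefix"]),
        status=IncExclActionFilterOptions(excludes={DiffAction.UNCHANGED}),
    )

    # conflict-only retrieval, as used by DiffDataCheckSynchronizer
    conflict_filters = EnrichedDiffQueryFilters(only_conflicted=True)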

infrahub/core/diff/merger/merger.py

@@ -3,8 +3,14 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 
 from infrahub.core import registry
+from infrahub.core.constants import DiffAction
 from infrahub.core.diff.model.path import BranchTrackingId
-from infrahub.core.diff.query.merge import DiffMergePropertiesQuery, DiffMergeQuery, DiffMergeRollbackQuery
+from infrahub.core.diff.query.merge import (
+    DiffMergeMigratedKindsQuery,
+    DiffMergePropertiesQuery,
+    DiffMergeQuery,
+    DiffMergeRollbackQuery,
+)
 from infrahub.log import get_logger
 
 if TYPE_CHECKING:
@@ -53,27 +59,51 @@ class DiffMerger:
         )
         log.info(f"Diff {latest_diff.uuid} retrieved")
         batch_num = 0
+        migrated_kinds_id_map = {}
+        for n in enriched_diff.nodes:
+            if not n.is_node_kind_migration:
+                continue
+            if n.uuid not in migrated_kinds_id_map or (
+                n.uuid in migrated_kinds_id_map and n.action is DiffAction.ADDED
+            ):
+                # make sure that we use the ADDED db_id if it exists
+                # it will not if a node was migrated and then deleted
+                migrated_kinds_id_map[n.uuid] = n.identifier.db_id
         async for node_diff_dicts, property_diff_dicts in self.serializer.serialize_diff(diff=enriched_diff):
-            log.info(f"Merging batch of nodes #{batch_num}")
-            merge_query = await DiffMergeQuery.init(
-                db=self.db,
-                branch=self.source_branch,
-                at=at,
-                target_branch=self.destination_branch,
-                node_diff_dicts=node_diff_dicts,
-            )
-            await merge_query.execute(db=self.db)
-            log.info(f"Merging batch of properties #{batch_num}")
-            merge_properties_query = await DiffMergePropertiesQuery.init(
+            if node_diff_dicts:
+                log.info(f"Merging batch of nodes #{batch_num}")
+                merge_query = await DiffMergeQuery.init(
+                    db=self.db,
+                    branch=self.source_branch,
+                    at=at,
+                    target_branch=self.destination_branch,
+                    node_diff_dicts=node_diff_dicts,
+                    migrated_kinds_id_map=migrated_kinds_id_map,
+                )
+                await merge_query.execute(db=self.db)
+            if property_diff_dicts:
+                log.info(f"Merging batch of properties #{batch_num}")
+                merge_properties_query = await DiffMergePropertiesQuery.init(
+                    db=self.db,
+                    branch=self.source_branch,
+                    at=at,
+                    target_branch=self.destination_branch,
+                    property_diff_dicts=property_diff_dicts,
+                    migrated_kinds_id_map=migrated_kinds_id_map,
+                )
+                await merge_properties_query.execute(db=self.db)
+            log.info(f"Batch #{batch_num} merged")
+            batch_num += 1
+        migrated_kind_uuids = {n.identifier.uuid for n in enriched_diff.nodes if n.is_node_kind_migration}
+        if migrated_kind_uuids:
+            migrated_merge_query = await DiffMergeMigratedKindsQuery.init(
                 db=self.db,
                 branch=self.source_branch,
                 at=at,
                 target_branch=self.destination_branch,
-                property_diff_dicts=property_diff_dicts,
+                migrated_uuids=list(migrated_kind_uuids),
             )
-            await merge_properties_query.execute(db=self.db)
-            log.info(f"Batch #{batch_num} merged")
-            batch_num += 1
+            await migrated_merge_query.execute(db=self.db)
 
         self.source_branch.branched_from = at.to_string()
         await self.source_branch.save(db=self.db)
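
The merge path above first builds migrated_kinds_id_map so that, for a node whose kind was migrated, the db_id of its ADDED entry wins over any other entry with the same UUID (the ADDED entry may be absent if the node was migrated and then deleted). A standalone sketch of that selection, using simplified stand-ins rather than the real EnrichedDiffNode model:

    # Simplified stand-ins; the real code iterates EnrichedDiffNode objects from the diff.
    from dataclasses import dataclass
    from enum import Enum


    class DiffAction(str, Enum):  # stand-in for infrahub.core.constants.DiffAction
        ADDED = "added"
        REMOVED = "removed"


    @dataclass
    class MigratedNode:  # stand-in carrying only the fields the selection needs
        uuid: str
        db_id: str
        action: DiffAction
        is_node_kind_migration: bool = True


    nodes = [
        MigratedNode(uuid="n1", db_id="old-db-id", action=DiffAction.REMOVED),
        MigratedNode(uuid="n1", db_id="new-db-id", action=DiffAction.ADDED),
    ]

    migrated_kinds_id_map: dict[str, str] = {}
    for n in nodes:
        if not n.is_node_kind_migration:
            continue
        if n.uuid not in migrated_kinds_id_map or n.action is DiffAction.ADDED:
            # prefer the ADDED db_id if it exists; it will not if the node was migrated then deleted
            migrated_kinds_id_map[n.uuid] = n.db_id

    print(migrated_kinds_id_map)
    # {'n1': 'new-db-id'}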

infrahub/core/diff/merger/serializer.py

@@ -39,6 +39,7 @@ class DiffMergeSerializer:
 
     def _reset_caches(self) -> None:
         self._attribute_type_cache = {}
+        self._conflicted_cardinality_one_relationships = set()
 
     @property
     def source_branch_name(self) -> str:

infrahub/core/diff/model/field_specifiers_map.py (new file)

@@ -0,0 +1,64 @@
+from __future__ import annotations
+
+
+class NodeFieldSpecifierMap:
+    def __init__(self) -> None:
+        # {uuid: {kind: {field_name, ...}}}
+        self._map: dict[str, dict[str, set[str]]] = {}
+
+    def __len__(self) -> int:
+        return len(self._map)
+
+    def __hash__(self) -> int:
+        full_node_hash_sum = 0
+        for node_uuid, node_dict in self._map.items():
+            node_kinds_hash_sum = 0
+            for kind, field_names in node_dict.items():
+                fields_hash = hash(frozenset(field_names))
+                node_kinds_hash_sum += hash(f"{hash(kind)}:{fields_hash}")
+            full_node_hash_sum += hash(f"{node_uuid}:{node_kinds_hash_sum}")
+        return hash(full_node_hash_sum)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, NodeFieldSpecifierMap):
+            return False
+        return self._map == other._map
+
+    def __sub__(self, other: NodeFieldSpecifierMap) -> NodeFieldSpecifierMap:
+        subtracted = NodeFieldSpecifierMap()
+        for node_uuid, node_dict in self._map.items():
+            if node_uuid not in other._map:
+                subtracted._map[node_uuid] = {**node_dict}
+                continue
+            subtracted_node_map = {}
+            for kind, field_names in node_dict.items():
+                subtracted_field_names = field_names - other._map[node_uuid].get(kind, set())
+                if not subtracted_field_names:
+                    continue
+                subtracted_node_map[kind] = subtracted_field_names
+            if not subtracted_node_map:
+                continue
+            subtracted._map[node_uuid] = subtracted_node_map
+        return subtracted
+
+    def add_entry(self, node_uuid: str, kind: str, field_name: str) -> None:
+        if node_uuid not in self._map:
+            self._map[node_uuid] = {}
+        if kind not in self._map[node_uuid]:
+            self._map[node_uuid][kind] = set()
+        self._map[node_uuid][kind].add(field_name)
+
+    def has_entry(self, node_uuid: str, kind: str, field_name: str) -> bool:
+        return field_name in self._map.get(node_uuid, {}).get(kind, set())
+
+    def get_uuids_list(self) -> list[str]:
+        return list(self._map.keys())
+
+    def get_uuid_field_names_map(self) -> dict[str, list[str]]:
+        uuid_field_names_map: dict[str, list[str]] = {}
+        for node_uuid, node_dict in self._map.items():
+            field_names_set: set[str] = set()
+            for field_names in node_dict.values():
+                field_names_set |= field_names
+            uuid_field_names_map[node_uuid] = list(field_names_set)
+        return uuid_field_names_map
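
NodeFieldSpecifierMap, used by EnrichedDiffRequest.node_field_specifiers in the coordinator hunks above, groups field names per node UUID and kind and supports set-style subtraction. A short usage sketch based on the methods defined in this new module:

    # Usage sketch for the new NodeFieldSpecifierMap (import path taken from the file list above).
    from infrahub.core.diff.model.field_specifiers_map import NodeFieldSpecifierMap

    earlier = NodeFieldSpecifierMap()
    earlier.add_entry(node_uuid="aaa", kind="TestPerson", field_name="name")
    earlier.add_entry(node_uuid="aaa", kind="TestPerson", field_name="height")

    later = NodeFieldSpecifierMap()
    later.add_entry(node_uuid="aaa", kind="TestPerson", field_name="name")

    remaining = earlier - later  # per node/kind/field set subtraction
    print(remaining.has_entry(node_uuid="aaa", kind="TestPerson", field_name="height"))  # True
    print(remaining.get_uuid_field_names_map())  # {'aaa': ['height']}
    print(earlier.get_uuids_list())  # ['aaa']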