infrahub-server 1.1.5__py3-none-any.whl → 1.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. infrahub/api/oidc.py +1 -0
  2. infrahub/core/attribute.py +4 -1
  3. infrahub/core/branch/tasks.py +7 -4
  4. infrahub/core/diff/calculator.py +21 -39
  5. infrahub/core/diff/combiner.py +11 -7
  6. infrahub/core/diff/coordinator.py +49 -70
  7. infrahub/core/diff/data_check_synchronizer.py +86 -7
  8. infrahub/core/diff/enricher/aggregated.py +3 -3
  9. infrahub/core/diff/enricher/cardinality_one.py +1 -6
  10. infrahub/core/diff/enricher/labels.py +13 -3
  11. infrahub/core/diff/enricher/path_identifier.py +2 -8
  12. infrahub/core/diff/ipam_diff_parser.py +1 -1
  13. infrahub/core/diff/merger/merger.py +5 -3
  14. infrahub/core/diff/merger/serializer.py +15 -8
  15. infrahub/core/diff/model/path.py +42 -24
  16. infrahub/core/diff/query/all_conflicts.py +5 -2
  17. infrahub/core/diff/query/diff_get.py +19 -23
  18. infrahub/core/diff/query/field_specifiers.py +2 -0
  19. infrahub/core/diff/query/field_summary.py +2 -1
  20. infrahub/core/diff/query/filters.py +12 -1
  21. infrahub/core/diff/query/has_conflicts_query.py +5 -2
  22. infrahub/core/diff/query/{drop_tracking_id.py → merge_tracking_id.py} +3 -3
  23. infrahub/core/diff/query/roots_metadata.py +8 -1
  24. infrahub/core/diff/query/save.py +148 -63
  25. infrahub/core/diff/query/summary_counts_enricher.py +220 -0
  26. infrahub/core/diff/query/time_range_query.py +2 -1
  27. infrahub/core/diff/query_parser.py +49 -24
  28. infrahub/core/diff/repository/deserializer.py +74 -71
  29. infrahub/core/diff/repository/repository.py +119 -30
  30. infrahub/core/node/__init__.py +6 -1
  31. infrahub/core/node/constraints/grouped_uniqueness.py +9 -2
  32. infrahub/core/node/ipam.py +6 -1
  33. infrahub/core/node/permissions.py +4 -0
  34. infrahub/core/query/diff.py +223 -230
  35. infrahub/core/query/node.py +8 -2
  36. infrahub/core/query/relationship.py +2 -1
  37. infrahub/core/query/resource_manager.py +3 -1
  38. infrahub/core/relationship/model.py +1 -1
  39. infrahub/core/schema/schema_branch.py +16 -7
  40. infrahub/core/utils.py +1 -0
  41. infrahub/core/validators/uniqueness/query.py +20 -17
  42. infrahub/database/__init__.py +13 -0
  43. infrahub/dependencies/builder/constraint/grouped/node_runner.py +0 -2
  44. infrahub/dependencies/builder/diff/coordinator.py +0 -2
  45. infrahub/git/integrator.py +10 -6
  46. infrahub/graphql/mutations/computed_attribute.py +3 -1
  47. infrahub/graphql/mutations/diff.py +28 -4
  48. infrahub/graphql/mutations/main.py +11 -6
  49. infrahub/graphql/mutations/relationship.py +29 -1
  50. infrahub/graphql/mutations/tasks.py +6 -3
  51. infrahub/graphql/queries/resource_manager.py +7 -3
  52. infrahub/permissions/__init__.py +2 -1
  53. infrahub/permissions/types.py +26 -0
  54. infrahub/proposed_change/tasks.py +6 -1
  55. infrahub/storage.py +6 -5
  56. {infrahub_server-1.1.5.dist-info → infrahub_server-1.1.7.dist-info}/METADATA +41 -7
  57. {infrahub_server-1.1.5.dist-info → infrahub_server-1.1.7.dist-info}/RECORD +64 -64
  58. infrahub_testcontainers/container.py +12 -3
  59. infrahub_testcontainers/docker-compose.test.yml +22 -3
  60. infrahub_testcontainers/haproxy.cfg +43 -0
  61. infrahub_testcontainers/helpers.py +85 -1
  62. infrahub/core/diff/enricher/summary_counts.py +0 -105
  63. infrahub/dependencies/builder/diff/enricher/summary_counts.py +0 -8
  64. {infrahub_server-1.1.5.dist-info → infrahub_server-1.1.7.dist-info}/LICENSE.txt +0 -0
  65. {infrahub_server-1.1.5.dist-info → infrahub_server-1.1.7.dist-info}/WHEEL +0 -0
  66. {infrahub_server-1.1.5.dist-info → infrahub_server-1.1.7.dist-info}/entry_points.txt +0 -0
@@ -905,7 +905,8 @@ class RelationshipCountPerNodeQuery(Query):
905
905
  path = "<-[r:IS_RELATED]-"
906
906
 
907
907
  query = """
908
- MATCH (rl:Relationship { name: $rel_identifier })
908
+ MATCH (peer_node:Node)%(path)s(rl:Relationship { name: $rel_identifier })
909
+ WHERE peer_node.uuid IN $peer_ids AND %(branch_filter)s
909
910
  CALL {
910
911
  WITH rl
911
912
  MATCH path = (peer_node:Node)%(path)s(rl)
@@ -123,6 +123,7 @@ class NumberPoolGetAllocated(Query):
123
123
  self.params["node_attribute"] = self.pool.node_attribute.value
124
124
  self.params["start_range"] = self.pool.start_range.value
125
125
  self.params["end_range"] = self.pool.end_range.value
126
+ self.params["pool_id"] = self.pool.get_id()
126
127
 
127
128
  branch_filter, branch_params = self.branch.get_query_filter_path(
128
129
  at=self.at.to_string(), branch_agnostic=self.branch_agnostic
@@ -133,7 +134,8 @@ class NumberPoolGetAllocated(Query):
133
134
  MATCH (n:%(node)s)-[ha:HAS_ATTRIBUTE]-(a:Attribute {name: $node_attribute})-[hv:HAS_VALUE]-(av:AttributeValue)
134
135
  MATCH (a)-[hs:HAS_SOURCE]-(pool:%(number_pool_kind)s)
135
136
  WHERE
136
- av.value >= $start_range and av.value <= $end_range
137
+ pool.uuid = $pool_id
138
+ AND av.value >= $start_range and av.value <= $end_range
137
139
  AND all(r in [ha, hv, hs] WHERE (%(branch_filter)s))
138
140
  AND ha.status = "active"
139
141
  AND hv.status = "active"
@@ -1110,7 +1110,7 @@ class RelationshipManager:
1110
1110
  # - Update the existing relationship if we are on the same branch
1111
1111
  rel_ids_per_branch = peer_data.rel_ids_per_branch()
1112
1112
  if branch.name in rel_ids_per_branch:
1113
- await update_relationships_to([str(ri) for ri in rel_ids_per_branch[self.branch.name]], to=remove_at, db=db)
1113
+ await update_relationships_to([str(ri) for ri in rel_ids_per_branch[branch.name]], to=remove_at, db=db)
1114
1114
 
1115
1115
  # - Create a new rel of type DELETED if the existing relationship is on a different branch
1116
1116
  rel_branches: set[str] = set()
@@ -492,6 +492,8 @@ class SchemaBranch:
492
492
  self.process_branch_support()
493
493
  self.manage_profile_schemas()
494
494
  self.manage_profile_relationships()
495
+ self.add_hierarchy_generic()
496
+ self.add_hierarchy_node()
495
497
 
496
498
  def process_validate(self) -> None:
497
499
  self.validate_names()
@@ -512,8 +514,6 @@ class SchemaBranch:
512
514
  def process_post_validation(self) -> None:
513
515
  self.cleanup_inherited_elements()
514
516
  self.add_groups()
515
- self.add_hierarchy_generic()
516
- self.add_hierarchy_node()
517
517
  self.generate_weight()
518
518
  self.process_labels()
519
519
  self.process_dropdowns()
@@ -633,7 +633,7 @@ class SchemaBranch:
633
633
  and not (
634
634
  schema_attribute_path.relationship_schema.name == "ip_namespace"
635
635
  and isinstance(node_schema, NodeSchema)
636
- and (node_schema.is_ip_address() or node_schema.is_ip_prefix)
636
+ and (node_schema.is_ip_address() or node_schema.is_ip_prefix())
637
637
  )
638
638
  ):
639
639
  raise ValueError(
@@ -1509,7 +1509,7 @@ class SchemaBranch:
1509
1509
  if changed:
1510
1510
  self.set(name=node_name, schema=schema)
1511
1511
 
1512
- def _get_hierarchy_child_rel(self, peer: str, hierarchical: str, read_only: bool) -> RelationshipSchema:
1512
+ def _get_hierarchy_child_rel(self, peer: str, hierarchical: str | None, read_only: bool) -> RelationshipSchema:
1513
1513
  return RelationshipSchema(
1514
1514
  name="children",
1515
1515
  identifier="parent__child",
@@ -1522,18 +1522,22 @@ class SchemaBranch:
1522
1522
  read_only=read_only,
1523
1523
  )
1524
1524
 
1525
- def _get_hierarchy_parent_rel(self, peer: str, hierarchical: str, read_only: bool) -> RelationshipSchema:
1525
+ def _get_hierarchy_parent_rel(
1526
+ self, peer: str, hierarchical: str | None, read_only: bool, optional: bool
1527
+ ) -> RelationshipSchema:
1526
1528
  return RelationshipSchema(
1527
1529
  name="parent",
1528
1530
  identifier="parent__child",
1529
1531
  peer=peer,
1530
1532
  kind=RelationshipKind.HIERARCHY,
1531
1533
  cardinality=RelationshipCardinality.ONE,
1534
+ min_count=0 if optional else 1,
1532
1535
  max_count=1,
1533
1536
  branch=BranchSupportType.AWARE,
1534
1537
  direction=RelationshipDirection.OUTBOUND,
1535
1538
  hierarchical=hierarchical,
1536
1539
  read_only=read_only,
1540
+ optional=optional,
1537
1541
  )
1538
1542
 
1539
1543
  def add_hierarchy_generic(self) -> None:
@@ -1548,7 +1552,9 @@ class SchemaBranch:
1548
1552
 
1549
1553
  if "parent" not in generic.relationship_names:
1550
1554
  generic.relationships.append(
1551
- self._get_hierarchy_parent_rel(peer=generic_name, hierarchical=generic_name, read_only=read_only)
1555
+ self._get_hierarchy_parent_rel(
1556
+ peer=generic_name, hierarchical=generic_name, read_only=read_only, optional=True
1557
+ )
1552
1558
  )
1553
1559
  if "children" not in generic.relationship_names:
1554
1560
  generic.relationships.append(
@@ -1571,7 +1577,10 @@ class SchemaBranch:
1571
1577
  if "parent" not in node.relationship_names:
1572
1578
  node.relationships.append(
1573
1579
  self._get_hierarchy_parent_rel(
1574
- peer=node.parent, hierarchical=node.hierarchy, read_only=read_only
1580
+ peer=node.parent,
1581
+ hierarchical=node.hierarchy,
1582
+ read_only=read_only,
1583
+ optional=node.parent in [node_name] + self.generic_names,
1575
1584
  )
1576
1585
  )
1577
1586
  else:
infrahub/core/utils.py CHANGED
@@ -72,6 +72,7 @@ async def update_relationships_to(ids: list[str], db: InfrahubDatabase, to: Time
72
72
  query = """
73
73
  MATCH ()-[r]->()
74
74
  WHERE %(id_func)s(r) IN $ids
75
+ AND r.to IS NULL
75
76
  SET r.to = $to
76
77
  RETURN %(id_func)s(r)
77
78
  """ % {"id_func": db.get_id_function_name()}
@@ -30,7 +30,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
30
30
  def get_context(self) -> dict[str, str]:
31
31
  return {"kind": self.query_request.kind}
32
32
 
33
- async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:
33
+ async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: # pylint: disable=too-many-branches
34
34
  branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False)
35
35
  self.params.update(branch_params)
36
36
  from_times = db.render_list_comprehension(items="relationships(potential_path)", item_name="from")
@@ -56,6 +56,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
56
56
  relationship_names = set()
57
57
  relationship_attr_paths = []
58
58
  relationship_only_attr_paths = []
59
+ relationship_only_attr_values = []
59
60
  relationship_attr_paths_with_value = []
60
61
  for rel_path in self.query_request.relationship_attribute_paths:
61
62
  relationship_names.add(rel_path.identifier)
@@ -67,6 +68,8 @@ class NodeUniqueAttributeConstraintQuery(Query):
67
68
  relationship_attr_paths.append((rel_path.identifier, rel_path.attribute_name))
68
69
  else:
69
70
  relationship_only_attr_paths.append(rel_path.identifier)
71
+ if rel_path.value:
72
+ relationship_only_attr_values.append(rel_path.value)
70
73
 
71
74
  if (
72
75
  not attr_paths
@@ -89,34 +92,37 @@ class NodeUniqueAttributeConstraintQuery(Query):
89
92
  "relationship_attr_paths": relationship_attr_paths,
90
93
  "relationship_attr_paths_with_value": relationship_attr_paths_with_value,
91
94
  "relationship_only_attr_paths": relationship_only_attr_paths,
95
+ "relationship_only_attr_values": relationship_only_attr_values,
92
96
  "min_count_required": self.min_count_required,
93
97
  }
94
98
  )
95
99
 
96
100
  attr_paths_subquery = """
97
- WITH start_node
98
- MATCH attr_path = (start_node)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
101
+ MATCH attr_path = (start_node:%(node_kind)s)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
99
102
  WHERE attr.name in $attribute_names
100
103
  AND ([attr.name, type(r)] in $attr_paths
101
104
  OR [attr.name, type(r), attr_value.value] in $attr_paths_with_value)
102
- RETURN attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
103
- """
105
+ RETURN start_node, attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
106
+ """ % {"node_kind": self.query_request.kind}
104
107
 
105
108
  relationship_attr_paths_with_value_subquery = """
106
- WITH start_node
107
- MATCH rel_path = (start_node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
109
+ MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
108
110
  WHERE relationship_node.name in $relationship_names
109
111
  AND ([relationship_node.name, rel_attr.name] in $relationship_attr_paths
110
112
  OR [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value)
111
- RETURN rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
112
- """
113
+ RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
114
+ """ % {"node_kind": self.query_request.kind}
113
115
 
114
116
  relationship_only_attr_paths_subquery = """
115
- WITH start_node
116
- MATCH rel_path = (start_node)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)
117
- WHERE relationship_node.name in $relationship_only_attr_paths
118
- RETURN rel_path as potential_path, relationship_node.name as rel_identifier, "id" as potential_attr, related_n.uuid as potential_attr_value
119
- """
117
+ MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)
118
+ WHERE %(rel_node_filter)s relationship_node.name in $relationship_only_attr_paths
119
+ RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, "id" as potential_attr, related_n.uuid as potential_attr_value
120
+ """ % {
121
+ "node_kind": self.query_request.kind,
122
+ "rel_node_filter": "related_n.uuid IN $relationship_only_attr_values AND "
123
+ if relationship_only_attr_values
124
+ else "",
125
+ }
120
126
 
121
127
  select_subqueries = []
122
128
  if attr_paths or attr_paths_with_value:
@@ -130,8 +136,6 @@ class NodeUniqueAttributeConstraintQuery(Query):
130
136
 
131
137
  # ruff: noqa: E501
132
138
  query = """
133
- // group by node
134
- MATCH (start_node:%(node_kind)s)
135
139
  // get attributes for node and its relationships
136
140
  CALL {
137
141
  %(select_subqueries_str)s
@@ -201,7 +205,6 @@ class NodeUniqueAttributeConstraintQuery(Query):
201
205
  attr_value,
202
206
  relationship_identifier
203
207
  """ % {
204
- "node_kind": self.query_request.kind,
205
208
  "select_subqueries_str": select_subqueries_str,
206
209
  "branch_filter": branch_filter,
207
210
  "from_times": from_times,
@@ -173,6 +173,19 @@ class InfrahubDatabase:
173
173
  elif self.db_type == DatabaseType.MEMGRAPH:
174
174
  self.manager = DatabaseManagerMemgraph(db=self)
175
175
 
176
+ def __del__(self) -> None:
177
+ if not self._session or not self._is_session_local or self._session.closed():
178
+ return
179
+
180
+ try:
181
+ loop = asyncio.get_running_loop()
182
+ except RuntimeError:
183
+ loop = None
184
+ if loop and loop.is_running():
185
+ loop.create_task(self._session.close())
186
+ else:
187
+ asyncio.run(self._session.close())
188
+
176
189
  @property
177
190
  def is_session(self) -> bool:
178
191
  if self._mode == InfrahubDatabaseMode.SESSION:
@@ -2,7 +2,6 @@ from infrahub.core.constraint.node.runner import NodeConstraintRunner
2
2
  from infrahub.dependencies.interface import DependencyBuilder, DependencyBuilderContext
3
3
 
4
4
  from ..node.grouped_uniqueness import NodeGroupedUniquenessConstraintDependency
5
- from ..node.uniqueness import NodeAttributeUniquenessConstraintDependency
6
5
  from ..relationship_manager.count import RelationshipCountConstraintDependency
7
6
  from ..relationship_manager.peer_kind import RelationshipPeerKindConstraintDependency
8
7
  from ..relationship_manager.profiles_kind import RelationshipProfilesKindConstraintDependency
@@ -15,7 +14,6 @@ class NodeConstraintRunnerDependency(DependencyBuilder[NodeConstraintRunner]):
15
14
  db=context.db,
16
15
  branch=context.branch,
17
16
  node_constraints=[
18
- NodeAttributeUniquenessConstraintDependency.build(context=context),
19
17
  NodeGroupedUniquenessConstraintDependency.build(context=context),
20
18
  ],
21
19
  relationship_manager_constraints=[
@@ -8,7 +8,6 @@ from .conflicts_enricher import DiffConflictsEnricherDependency
8
8
  from .data_check_synchronizer import DiffDataCheckSynchronizerDependency
9
9
  from .enricher.aggregated import DiffAggregatedEnricherDependency
10
10
  from .enricher.labels import DiffLabelsEnricherDependency
11
- from .enricher.summary_counts import DiffSummaryCountsEnricherDependency
12
11
  from .repository import DiffRepositoryDependency
13
12
 
14
13
 
@@ -22,7 +21,6 @@ class DiffCoordinatorDependency(DependencyBuilder[DiffCoordinator]):
22
21
  diff_enricher=DiffAggregatedEnricherDependency.build(context=context),
23
22
  conflicts_enricher=DiffConflictsEnricherDependency.build(context=context),
24
23
  labels_enricher=DiffLabelsEnricherDependency.build(context=context),
25
- summary_counts_enricher=DiffSummaryCountsEnricherDependency.build(context=context),
26
24
  data_check_synchronizer=DiffDataCheckSynchronizerDependency.build(context=context),
27
25
  conflict_transferer=DiffConflictTransfererDependency.build(context=context),
28
26
  )
@@ -1238,10 +1238,12 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase): # pylint: disable=t
1238
1238
  client=self.sdk,
1239
1239
  )
1240
1240
 
1241
- if definition.content_type.value == ContentType.APPLICATION_JSON.value:
1241
+ if definition.content_type.value == ContentType.APPLICATION_JSON.value and isinstance(artifact_content, dict):
1242
1242
  artifact_content_str = ujson.dumps(artifact_content, indent=2)
1243
- elif definition.content_type.value == ContentType.TEXT_PLAIN.value:
1244
- artifact_content_str = artifact_content
1243
+ elif definition.content_type.value == ContentType.APPLICATION_YAML.value and isinstance(artifact_content, dict):
1244
+ artifact_content_str = yaml.dump(artifact_content, indent=2)
1245
+ else:
1246
+ artifact_content_str = str(artifact_content)
1245
1247
 
1246
1248
  checksum = hashlib.md5(bytes(artifact_content_str, encoding="utf-8"), usedforsecurity=False).hexdigest()
1247
1249
 
@@ -1288,10 +1290,12 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase): # pylint: disable=t
1288
1290
  client=self.sdk,
1289
1291
  )
1290
1292
 
1291
- if message.content_type == ContentType.APPLICATION_JSON.value:
1293
+ if message.content_type == ContentType.APPLICATION_JSON.value and isinstance(artifact_content, dict):
1292
1294
  artifact_content_str = ujson.dumps(artifact_content, indent=2)
1293
- elif message.content_type == ContentType.TEXT_PLAIN.value:
1294
- artifact_content_str = artifact_content
1295
+ elif message.content_type == ContentType.APPLICATION_YAML.value and isinstance(artifact_content, dict):
1296
+ artifact_content_str = yaml.dump(artifact_content, indent=2)
1297
+ else:
1298
+ artifact_content_str = str(artifact_content)
1295
1299
 
1296
1300
  checksum = hashlib.md5(bytes(artifact_content_str, encoding="utf-8"), usedforsecurity=False).hexdigest()
1297
1301
 
@@ -87,7 +87,9 @@ class UpdateComputedAttribute(Mutation):
87
87
  log_data = get_log_data()
88
88
  request_id = log_data.get("request_id", "")
89
89
 
90
- graphql_payload = await target_node.to_graphql(db=context.db, filter_sensitive=True)
90
+ graphql_payload = await target_node.to_graphql(
91
+ db=context.db, filter_sensitive=True, include_properties=False
92
+ )
91
93
 
92
94
  event = NodeMutatedEvent(
93
95
  branch=context.branch.name,
@@ -5,9 +5,13 @@ from graphql import GraphQLResolveInfo
5
5
 
6
6
  from infrahub.core import registry
7
7
  from infrahub.core.diff.coordinator import DiffCoordinator
8
+ from infrahub.core.diff.model.path import NameTrackingId
8
9
  from infrahub.core.diff.models import RequestDiffUpdate
10
+ from infrahub.core.diff.repository.repository import DiffRepository
11
+ from infrahub.core.timestamp import Timestamp
9
12
  from infrahub.database import retry_db_transaction
10
13
  from infrahub.dependencies.registry import get_component_registry
14
+ from infrahub.exceptions import ValidationError
11
15
  from infrahub.workflows.catalogue import DIFF_UPDATE
12
16
 
13
17
  if TYPE_CHECKING:
@@ -40,11 +44,31 @@ class DiffUpdateMutation(Mutation):
40
44
 
41
45
  from_timestamp_str = DateTime.serialize(data.from_time) if data.from_time else None
42
46
  to_timestamp_str = DateTime.serialize(data.to_time) if data.to_time else None
43
- if data.wait_for_completion is True:
44
- component_registry = get_component_registry()
45
- base_branch = await registry.get_branch(db=context.db, branch=registry.default_branch)
46
- diff_branch = await registry.get_branch(db=context.db, branch=data.branch)
47
+ if (data.from_time or data.to_time) and not data.name:
48
+ raise ValidationError("diff with specified time range requires a name")
49
+
50
+ component_registry = get_component_registry()
51
+ base_branch = await registry.get_branch(db=context.db, branch=registry.default_branch)
52
+ diff_branch = await registry.get_branch(db=context.db, branch=data.branch)
53
+ diff_repository = await component_registry.get_component(DiffRepository, db=context.db, branch=diff_branch)
47
54
 
55
+ tracking_id = NameTrackingId(name=data.name)
56
+ existing_diffs_metatdatas = await diff_repository.get_roots_metadata(
57
+ diff_branch_names=[diff_branch.name], base_branch_names=[base_branch.name], tracking_id=tracking_id
58
+ )
59
+ if existing_diffs_metatdatas:
60
+ metadata = existing_diffs_metatdatas[0]
61
+ from_time = Timestamp(from_timestamp_str) if from_timestamp_str else None
62
+ to_time = Timestamp(to_timestamp_str) if to_timestamp_str else None
63
+ branched_from_timestamp = Timestamp(diff_branch.get_branched_from())
64
+ if from_time and from_time > metadata.from_time:
65
+ raise ValidationError(f"from_time must be null or less than or equal to {metadata.from_time}")
66
+ if from_time and from_time < branched_from_timestamp:
67
+ raise ValidationError(f"from_time must be null or greater than or equal to {branched_from_timestamp}")
68
+ if to_time and to_time < metadata.to_time:
69
+ raise ValidationError(f"to_time must be null or greater than or equal to {metadata.to_time}")
70
+
71
+ if data.wait_for_completion is True:
48
72
  diff_coordinator = await component_registry.get_component(
49
73
  DiffCoordinator, db=context.db, branch=diff_branch
50
74
  )
@@ -97,7 +97,7 @@ class InfrahubMutationMixin:
97
97
  log_data = get_log_data()
98
98
  request_id = log_data.get("request_id", "")
99
99
 
100
- graphql_payload = await obj.to_graphql(db=context.db, filter_sensitive=True)
100
+ graphql_payload = await obj.to_graphql(db=context.db, filter_sensitive=True, include_properties=False)
101
101
  event = NodeMutatedEvent(
102
102
  branch=context.branch.name,
103
103
  kind=obj._schema.kind,
@@ -175,20 +175,25 @@ class InfrahubMutationMixin:
175
175
  branch: Branch,
176
176
  ) -> Node:
177
177
  component_registry = get_component_registry()
178
- node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch)
178
+ node_constraint_runner = await component_registry.get_component(
179
+ NodeConstraintRunner, db=db.start_session(), branch=branch
180
+ )
179
181
  node_class = Node
180
182
  if cls._meta.schema.kind in registry.node:
181
183
  node_class = registry.node[cls._meta.schema.kind]
182
184
 
185
+ fields_to_validate = list(data)
183
186
  try:
184
- obj = await node_class.init(db=db, schema=cls._meta.schema, branch=branch)
185
- await obj.new(db=db, **data)
186
- fields_to_validate = list(data)
187
- await node_constraint_runner.check(node=obj, field_filters=fields_to_validate)
188
187
  if db.is_transaction:
188
+ obj = await node_class.init(db=db, schema=cls._meta.schema, branch=branch)
189
+ await obj.new(db=db, **data)
190
+ await node_constraint_runner.check(node=obj, field_filters=fields_to_validate)
189
191
  await obj.save(db=db)
190
192
  else:
191
193
  async with db.start_transaction() as dbt:
194
+ obj = await node_class.init(db=dbt, schema=cls._meta.schema, branch=branch)
195
+ await obj.new(db=dbt, **data)
196
+ await node_constraint_runner.check(node=obj, field_filters=fields_to_validate)
192
197
  await obj.save(db=dbt)
193
198
 
194
199
  except ValidationError as exc:
@@ -5,7 +5,8 @@ from typing import TYPE_CHECKING
5
5
  from graphene import Boolean, InputField, InputObjectType, List, Mutation, String
6
6
  from infrahub_sdk.utils import compare_lists
7
7
 
8
- from infrahub.core.constants import InfrahubKind, RelationshipCardinality
8
+ from infrahub.core.account import GlobalPermission, ObjectPermission
9
+ from infrahub.core.constants import InfrahubKind, PermissionAction, PermissionDecision, RelationshipCardinality
9
10
  from infrahub.core.manager import NodeManager
10
11
  from infrahub.core.query.relationship import (
11
12
  RelationshipGetPeerQuery,
@@ -14,6 +15,7 @@ from infrahub.core.query.relationship import (
14
15
  from infrahub.core.relationship import Relationship
15
16
  from infrahub.database import retry_db_transaction
16
17
  from infrahub.exceptions import NodeNotFoundError, ValidationError
18
+ from infrahub.permissions import get_global_permission_for_kind
17
19
 
18
20
  from ..types import RelatedNodeInput
19
21
 
@@ -76,6 +78,32 @@ class RelationshipMixin:
76
78
  db=context.db, ids=node_ids, fields={"display_label": None}, branch=context.branch
77
79
  )
78
80
 
81
+ if context.account_session:
82
+ impacted_schemas = {node.get_schema() for node in [source] + list(nodes.values())}
83
+ required_permissions: list[GlobalPermission | ObjectPermission] = []
84
+ decision = (
85
+ PermissionDecision.ALLOW_DEFAULT.value
86
+ if context.branch.is_default
87
+ else PermissionDecision.ALLOW_OTHER.value
88
+ )
89
+
90
+ for impacted_schema in impacted_schemas:
91
+ global_action = get_global_permission_for_kind(schema=impacted_schema)
92
+
93
+ if global_action:
94
+ required_permissions.append(GlobalPermission(action=global_action, decision=decision))
95
+ else:
96
+ required_permissions.append(
97
+ ObjectPermission(
98
+ namespace=impacted_schema.namespace,
99
+ name=impacted_schema.name,
100
+ action=PermissionAction.UPDATE.value,
101
+ decision=decision,
102
+ )
103
+ )
104
+
105
+ context.active_permissions.raise_for_permissions(permissions=required_permissions)
106
+
79
107
  _, _, in_list2 = compare_lists(list1=list(nodes.keys()), list2=node_ids)
80
108
  if in_list2:
81
109
  for node_id in in_list2:
@@ -31,14 +31,17 @@ async def merge_branch_mutation(branch: str) -> None:
31
31
  diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=obj)
32
32
  diff_repository = await component_registry.get_component(DiffRepository, db=db, branch=obj)
33
33
  diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=obj)
34
- enriched_diff = await diff_coordinator.update_branch_diff_and_return(base_branch=base_branch, diff_branch=obj)
35
- if enriched_diff.get_all_conflicts():
34
+ enriched_diff_metadata = await diff_coordinator.update_branch_diff(base_branch=base_branch, diff_branch=obj)
35
+ async for _ in diff_repository.get_all_conflicts_for_diff(
36
+ diff_branch_name=enriched_diff_metadata.diff_branch_name, diff_id=enriched_diff_metadata.uuid
37
+ ):
38
+ # if there are any conflicts, raise the error
36
39
  raise ValidationError(
37
40
  f"Branch {obj.name} contains conflicts with the default branch."
38
41
  " Please create a Proposed Change to resolve the conflicts or manually update them before merging."
39
42
  )
40
43
  node_diff_field_summaries = await diff_repository.get_node_field_summaries(
41
- diff_branch_name=enriched_diff.diff_branch_name, diff_id=enriched_diff.uuid
44
+ diff_branch_name=enriched_diff_metadata.diff_branch_name, diff_id=enriched_diff_metadata.uuid
42
45
  )
43
46
 
44
47
  merger = BranchMerger(
@@ -21,8 +21,10 @@ from infrahub.pools.number import NumberUtilizationGetter
21
21
  if TYPE_CHECKING:
22
22
  from graphql import GraphQLResolveInfo
23
23
 
24
+ from infrahub.core.branch import Branch
24
25
  from infrahub.core.node import Node
25
26
  from infrahub.core.protocols import CoreNode
27
+ from infrahub.core.timestamp import Timestamp
26
28
  from infrahub.database import InfrahubDatabase
27
29
  from infrahub.graphql.initialization import GraphqlContext
28
30
 
@@ -184,7 +186,7 @@ class PoolUtilization(ObjectType):
184
186
  pool: CoreNode | None = await NodeManager.get_one(id=pool_id, db=db, branch=context.branch)
185
187
  pool = _validate_pool_type(pool_id=pool_id, pool=pool)
186
188
  if pool.get_kind() == "CoreNumberPool":
187
- return await resolve_number_pool_utilization(db=db, context=context, pool=pool)
189
+ return await resolve_number_pool_utilization(db=db, at=context.at, pool=pool, branch=context.branch)
188
190
 
189
191
  resources_map: dict[str, Node] = {}
190
192
 
@@ -290,8 +292,10 @@ async def resolve_number_pool_allocation(
290
292
  return response
291
293
 
292
294
 
293
- async def resolve_number_pool_utilization(db: InfrahubDatabase, context: GraphqlContext, pool: CoreNode) -> dict:
294
- number_pool = NumberUtilizationGetter(db=db, pool=pool, at=context.at, branch=context.branch)
295
+ async def resolve_number_pool_utilization(
296
+ db: InfrahubDatabase, pool: CoreNode, at: Timestamp | str | None, branch: Branch
297
+ ) -> dict:
298
+ number_pool = NumberUtilizationGetter(db=db, pool=pool, at=at, branch=branch)
295
299
  await number_pool.load_data()
296
300
 
297
301
  return {
@@ -2,12 +2,13 @@ from infrahub.permissions.backend import PermissionBackend
2
2
  from infrahub.permissions.local_backend import LocalPermissionBackend
3
3
  from infrahub.permissions.manager import PermissionManager
4
4
  from infrahub.permissions.report import report_schema_permissions
5
- from infrahub.permissions.types import AssignedPermissions
5
+ from infrahub.permissions.types import AssignedPermissions, get_global_permission_for_kind
6
6
 
7
7
  __all__ = [
8
8
  "AssignedPermissions",
9
9
  "LocalPermissionBackend",
10
10
  "PermissionBackend",
11
11
  "PermissionManager",
12
+ "get_global_permission_for_kind",
12
13
  "report_schema_permissions",
13
14
  ]
@@ -2,8 +2,12 @@ from __future__ import annotations
2
2
 
3
3
  from typing import TYPE_CHECKING, TypedDict
4
4
 
5
+ from infrahub.core.constants import GlobalPermissions, InfrahubKind
6
+ from infrahub.core.schema import NodeSchema
7
+
5
8
  if TYPE_CHECKING:
6
9
  from infrahub.core.account import GlobalPermission, ObjectPermission
10
+ from infrahub.core.schema import MainSchemaTypes
7
11
  from infrahub.permissions.constants import BranchRelativePermissionDecision
8
12
 
9
13
 
@@ -18,3 +22,25 @@ class KindPermissions(TypedDict):
18
22
  delete: BranchRelativePermissionDecision
19
23
  update: BranchRelativePermissionDecision
20
24
  view: BranchRelativePermissionDecision
25
+
26
+
27
+ def get_global_permission_for_kind(schema: MainSchemaTypes) -> GlobalPermissions | None:
28
+ kind_permission_map = {
29
+ InfrahubKind.GENERICACCOUNT: GlobalPermissions.MANAGE_ACCOUNTS,
30
+ InfrahubKind.ACCOUNTGROUP: GlobalPermissions.MANAGE_ACCOUNTS,
31
+ InfrahubKind.ACCOUNTROLE: GlobalPermissions.MANAGE_ACCOUNTS,
32
+ InfrahubKind.BASEPERMISSION: GlobalPermissions.MANAGE_PERMISSIONS,
33
+ InfrahubKind.GENERICREPOSITORY: GlobalPermissions.MANAGE_REPOSITORIES,
34
+ }
35
+
36
+ if schema.kind in kind_permission_map:
37
+ return kind_permission_map[schema.kind]
38
+
39
+ if isinstance(schema, NodeSchema):
40
+ for base in schema.inherit_from:
41
+ try:
42
+ return kind_permission_map[base]
43
+ except KeyError:
44
+ continue
45
+
46
+ return None
@@ -31,6 +31,7 @@ from infrahub.core.validators.determiner import ConstraintValidatorDeterminer
31
31
  from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
32
32
  from infrahub.core.validators.tasks import schema_validate_migrations
33
33
  from infrahub.dependencies.registry import get_component_registry
34
+ from infrahub.exceptions import MergeFailedError
34
35
  from infrahub.generators.models import ProposedChangeGeneratorDefinition
35
36
  from infrahub.git.repository import get_initialized_repo
36
37
  from infrahub.log import get_logger
@@ -131,7 +132,11 @@ async def merge_proposed_change(proposed_change_id: str, proposed_change_name: s
131
132
  )
132
133
 
133
134
  log.info("Proposed change is eligible to be merged")
134
- await merge_branch(branch=source_branch.name)
135
+ try:
136
+ await merge_branch(branch=source_branch.name)
137
+ except MergeFailedError as exc:
138
+ await _proposed_change_transition_state(proposed_change=proposed_change, state=ProposedChangeState.OPEN)
139
+ return Failed(message=f"Merge failure when trying to merge {exc.message}")
135
140
 
136
141
  log.info(f"Branch {source_branch.name} has been merged successfully")
137
142
 
infrahub/storage.py CHANGED
@@ -1,3 +1,4 @@
1
+ import io
1
2
  import tempfile
2
3
  from typing import Any, BinaryIO
3
4
 
@@ -17,11 +18,11 @@ class InfrahubS3ObjectStorage(fastapi_storages.S3Storage):
17
18
  super().__init__()
18
19
 
19
20
  def open(self, name: str) -> BinaryIO:
20
- with tempfile.NamedTemporaryFile() as f:
21
- self._bucket.download_fileobj(name, f)
22
- f.flush()
23
- f.seek(0)
24
- return f # type: ignore
21
+ f = io.BytesIO()
22
+ self._bucket.download_fileobj(name, f)
23
+ f.flush()
24
+ f.seek(0)
25
+ return f # type: ignore
25
26
 
26
27
 
27
28
  fastapi_storages.InfrahubS3ObjectStorage = InfrahubS3ObjectStorage