infrahub-server 1.2.9rc0__py3-none-any.whl → 1.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. infrahub/computed_attribute/models.py +13 -0
  2. infrahub/computed_attribute/tasks.py +48 -26
  3. infrahub/config.py +9 -0
  4. infrahub/core/attribute.py +43 -2
  5. infrahub/core/branch/models.py +8 -9
  6. infrahub/core/branch/tasks.py +0 -2
  7. infrahub/core/constants/infrahubkind.py +1 -0
  8. infrahub/core/constraint/node/runner.py +1 -1
  9. infrahub/core/diff/calculator.py +65 -11
  10. infrahub/core/diff/combiner.py +38 -31
  11. infrahub/core/diff/coordinator.py +44 -28
  12. infrahub/core/diff/data_check_synchronizer.py +3 -2
  13. infrahub/core/diff/enricher/hierarchy.py +36 -27
  14. infrahub/core/diff/ipam_diff_parser.py +5 -4
  15. infrahub/core/diff/merger/merger.py +46 -16
  16. infrahub/core/diff/merger/serializer.py +1 -0
  17. infrahub/core/diff/model/field_specifiers_map.py +64 -0
  18. infrahub/core/diff/model/path.py +58 -58
  19. infrahub/core/diff/parent_node_adder.py +14 -16
  20. infrahub/core/diff/query/drop_nodes.py +42 -0
  21. infrahub/core/diff/query/field_specifiers.py +8 -7
  22. infrahub/core/diff/query/filters.py +15 -1
  23. infrahub/core/diff/query/merge.py +264 -28
  24. infrahub/core/diff/query/save.py +6 -2
  25. infrahub/core/diff/query_parser.py +55 -65
  26. infrahub/core/diff/repository/deserializer.py +38 -24
  27. infrahub/core/diff/repository/repository.py +31 -12
  28. infrahub/core/diff/tasks.py +3 -3
  29. infrahub/core/graph/__init__.py +1 -1
  30. infrahub/core/migrations/graph/__init__.py +2 -0
  31. infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
  32. infrahub/core/migrations/graph/m028_delete_diffs.py +38 -0
  33. infrahub/core/node/resource_manager/ip_address_pool.py +6 -2
  34. infrahub/core/node/resource_manager/ip_prefix_pool.py +6 -2
  35. infrahub/core/protocols.py +4 -0
  36. infrahub/core/query/branch.py +27 -17
  37. infrahub/core/query/diff.py +169 -51
  38. infrahub/core/query/node.py +39 -5
  39. infrahub/core/query/relationship.py +105 -30
  40. infrahub/core/query/subquery.py +2 -2
  41. infrahub/core/relationship/model.py +1 -1
  42. infrahub/core/schema/definitions/core/__init__.py +8 -1
  43. infrahub/core/schema/definitions/core/resource_pool.py +20 -0
  44. infrahub/core/schema/schema_branch.py +3 -0
  45. infrahub/core/validators/tasks.py +1 -1
  46. infrahub/core/validators/uniqueness/query.py +7 -0
  47. infrahub/database/__init__.py +5 -4
  48. infrahub/graphql/app.py +1 -1
  49. infrahub/graphql/loaders/node.py +1 -1
  50. infrahub/graphql/loaders/peers.py +1 -1
  51. infrahub/graphql/mutations/proposed_change.py +1 -1
  52. infrahub/graphql/queries/diff/tree.py +2 -1
  53. infrahub/graphql/queries/relationship.py +1 -1
  54. infrahub/graphql/queries/task.py +10 -0
  55. infrahub/graphql/resolvers/many_relationship.py +4 -4
  56. infrahub/graphql/resolvers/resolver.py +4 -4
  57. infrahub/graphql/resolvers/single_relationship.py +2 -2
  58. infrahub/graphql/subscription/graphql_query.py +2 -2
  59. infrahub/graphql/types/branch.py +1 -1
  60. infrahub/graphql/types/task_log.py +3 -2
  61. infrahub/message_bus/operations/refresh/registry.py +1 -1
  62. infrahub/task_manager/task.py +44 -4
  63. infrahub/telemetry/database.py +1 -1
  64. infrahub/telemetry/tasks.py +1 -1
  65. infrahub/trigger/models.py +11 -1
  66. infrahub/trigger/setup.py +51 -15
  67. infrahub/trigger/tasks.py +1 -4
  68. infrahub/types.py +1 -1
  69. infrahub/webhook/models.py +2 -1
  70. infrahub/workflows/catalogue.py +9 -0
  71. infrahub/workflows/initialization.py +1 -3
  72. infrahub_sdk/timestamp.py +2 -2
  73. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/METADATA +3 -3
  74. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/RECORD +79 -75
  75. infrahub_testcontainers/docker-compose.test.yml +3 -3
  76. infrahub_testcontainers/performance_test.py +6 -3
  77. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/LICENSE.txt +0 -0
  78. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/WHEEL +0 -0
  79. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.11.dist-info}/entry_points.txt +0 -0
infrahub/core/query/relationship.py CHANGED
@@ -73,9 +73,18 @@ class RelationshipPeerData:
     source_id: UUID
     """UUID of the Source Node."""
 
+    source_db_id: str
+    """Internal DB ID of the Source Node."""
+
+    source_kind: str
+    """Kind of the Source Node."""
+
     peer_id: UUID
     """UUID of the Peer Node."""
 
+    peer_db_id: str
+    """Internal DB ID of the Peer Node."""
+
     peer_kind: str
     """Kind of the Peer Node."""
 
@@ -85,9 +94,6 @@ class RelationshipPeerData:
     rel_node_id: UUID | None = None
     """UUID of the Relationship Node."""
 
-    peer_db_id: str | None = None
-    """Internal DB ID of the Peer Node."""
-
     rel_node_db_id: str | None = None
     """Internal DB ID of the Relationship Node."""
 
@@ -196,6 +202,63 @@ class RelationshipQuery(Query):
             rel_prop_dict["hierarchy"] = self.schema.hierarchical
         return rel_prop_dict
 
+    def add_source_match_to_query(self, source_branch: Branch) -> None:
+        self.params["source_id"] = self.source_id or self.source.get_id()
+        if source_branch.is_global or source_branch.is_default:
+            source_query_match = """
+            MATCH (s:Node { uuid: $source_id })
+            OPTIONAL MATCH (s)-[delete_edge:IS_PART_OF {status: "deleted", branch: $source_branch}]->(:Root)
+            WHERE delete_edge.from <= $at
+            WITH *, s WHERE delete_edge IS NULL
+            """
+            self.params["source_branch"] = source_branch.name
+        source_filter, source_filter_params = source_branch.get_query_filter_path(
+            at=self.at, variable_name="r", params_prefix="src_"
+        )
+        source_query_match = """
+        MATCH (s:Node { uuid: $source_id })
+        CALL {
+            WITH s
+            MATCH (s)-[r:IS_PART_OF]->(:Root)
+            WHERE %(source_filter)s
+            RETURN r.status = "active" AS s_is_active
+            ORDER BY r.from DESC
+            LIMIT 1
+        }
+        WITH *, s WHERE s_is_active = TRUE
+        """ % {"source_filter": source_filter}
+        self.params.update(source_filter_params)
+        self.add_to_query(source_query_match)
+
+    def add_dest_match_to_query(self, destination_branch: Branch, destination_id: str) -> None:
+        self.params["destination_id"] = destination_id
+        if destination_branch.is_global or destination_branch.is_default:
+            destination_query_match = """
+            MATCH (d:Node { uuid: $destination_id })
+            OPTIONAL MATCH (d)-[delete_edge:IS_PART_OF {status: "deleted", branch: $destination_branch}]->(:Root)
+            WHERE delete_edge.from <= $at
+            WITH *, d WHERE delete_edge IS NULL
+            """
+            self.params["destination_branch"] = destination_branch.name
+        else:
+            destination_filter, destination_filter_params = destination_branch.get_query_filter_path(
+                at=self.at, variable_name="r", params_prefix="dst_"
+            )
+            destination_query_match = """
+            MATCH (d:Node { uuid: $destination_id })
+            CALL {
+                WITH d
+                MATCH (d)-[r:IS_PART_OF]->(:Root)
+                WHERE %(destination_filter)s
+                RETURN r.status = "active" AS d_is_active
+                ORDER BY r.from DESC
+                LIMIT 1
+            }
+            WITH *, d WHERE d_is_active = TRUE
+            """ % {"destination_filter": destination_filter}
+            self.params.update(destination_filter_params)
+        self.add_to_query(destination_query_match)
+
 
 class RelationshipCreateQuery(RelationshipQuery):
     name = "relationship_create"
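The two helpers above replace unconditional MATCH statements on source and destination with an existence guard: a node is only matched if its latest IS_PART_OF edge, as selected by the branch filter, is still active, which stops new relationships from pointing at nodes already deleted on the relevant branch. A minimal standalone sketch of that guard pattern, assuming a pre-rendered branch filter string in place of Branch.get_query_filter_path(); the function and parameter names are illustrative, not part of the package:

    def build_active_node_guard(alias: str, uuid_param: str, branch_filter: str) -> str:
        """Return a Cypher fragment that keeps `alias` only when its most recent
        IS_PART_OF edge (per the supplied branch filter) still has status "active"."""
        return f"""
        MATCH ({alias}:Node {{ uuid: ${uuid_param} }})
        CALL {{
            WITH {alias}
            MATCH ({alias})-[r:IS_PART_OF]->(:Root)
            WHERE {branch_filter}
            RETURN r.status = "active" AS {alias}_is_active
            ORDER BY r.from DESC
            LIMIT 1
        }}
        WITH *, {alias} WHERE {alias}_is_active = TRUE
        """

    # Example: build_active_node_guard("s", "source_id", "r.branch IN $branches")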
@@ -214,8 +277,6 @@ class RelationshipCreateQuery(RelationshipQuery):
         super().__init__(destination=destination, destination_id=destination_id, **kwargs)
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["name"] = self.schema.identifier
         self.params["branch_support"] = self.schema.branch.value
 
@@ -228,12 +289,11 @@ class RelationshipCreateQuery(RelationshipQuery):
         self.params["is_protected"] = self.rel.is_protected
         self.params["is_visible"] = self.rel.is_visible
 
-        query_match = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
-        """
-        self.add_to_query(query_match)
-
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         self.query_add_all_node_property_match()
 
         self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.ACTIVE)
@@ -378,7 +438,6 @@ class RelationshipDataDeleteQuery(RelationshipQuery):
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
         self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.data.peer_id
         self.params["rel_node_id"] = self.data.rel_node_id
         self.params["name"] = self.schema.identifier
         self.params["branch"] = self.branch.name
@@ -388,9 +447,10 @@ class RelationshipDataDeleteQuery(RelationshipQuery):
         # -----------------------------------------------------------------------
         # Match all nodes, including properties
         # -----------------------------------------------------------------------
+
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(destination_branch=self.branch, destination_id=self.data.peer_id)
         query = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
         MATCH (rl:Relationship { uuid: $rel_node_id })
         """
         self.add_to_query(query)
@@ -442,8 +502,6 @@ class RelationshipDeleteQuery(RelationshipQuery):
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
         rel_filter, rel_params = self.branch.get_query_filter_path(at=self.at, variable_name="edge")
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["rel_id"] = self.rel.id
         self.params["branch"] = self.branch.name
         self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.DELETED)
@@ -454,9 +512,14 @@ class RelationshipDeleteQuery(RelationshipQuery):
         r1 = f"{arrows.left.start}[r1:{self.rel_type} $rel_prop ]{arrows.left.end}"
         r2 = f"{arrows.right.start}[r2:{self.rel_type} $rel_prop ]{arrows.right.end}"
 
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         query = """
-        MATCH (s:Node { uuid: $source_id })-[:IS_RELATED]-(rl:Relationship {uuid: $rel_id})-[:IS_RELATED]-(d:Node { uuid: $destination_id })
-        WITH s, rl, d
+        MATCH (s)-[:IS_RELATED]-(rl:Relationship {uuid: $rel_id})-[:IS_RELATED]-(d)
+        WITH DISTINCT s, rl, d
         LIMIT 1
         CREATE (s)%(r1)s(rl)
         CREATE (rl)%(r2)s(d)
@@ -752,10 +815,15 @@ class RelationshipGetPeerQuery(Query):
     def get_peers(self) -> Generator[RelationshipPeerData, None, None]:
         for result in self.get_results_group_by(("peer", "uuid"), ("source_node", "uuid")):
             rels = result.get("rels")
+            source_node = result.get_node("source_node")
+            peer_node = result.get_node("peer")
             data = RelationshipPeerData(
-                source_id=result.get_node("source_node").get("uuid"),
-                peer_id=result.get_node("peer").get("uuid"),
-                peer_kind=result.get_node("peer").get("kind"),
+                source_id=source_node.get("uuid"),
+                source_db_id=source_node.element_id,
+                source_kind=source_node.get("kind"),
+                peer_id=peer_node.get("uuid"),
+                peer_db_id=peer_node.element_id,
+                peer_kind=peer_node.get("kind"),
                 rel_node_db_id=result.get("rl").element_id,
                 rel_node_id=result.get("rl").get("uuid"),
                 updated_at=rels[0]["from"],
@@ -793,8 +861,6 @@ class RelationshipGetQuery(RelationshipQuery):
     type: QueryType = QueryType.READ
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["name"] = self.schema.identifier
         self.params["branch"] = self.branch.name
 
@@ -808,9 +874,12 @@ class RelationshipGetQuery(RelationshipQuery):
         r1 = f"{arrows.left.start}[r1:{self.rel.rel_type}]{arrows.left.end}"
         r2 = f"{arrows.right.start}[r2:{self.rel.rel_type}]{arrows.right.end}"
 
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         query = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
         MATCH (s)%s(rl:Relationship { name: $name })%s(d)
         WHERE %s
         """ % (
@@ -1037,7 +1106,11 @@ class RelationshipDeleteAllQuery(Query):
         CALL {
             WITH rl
             MATCH (rl)-[active_edge:IS_RELATED]->(n)
-            WHERE %(active_rel_filter)s AND active_edge.status ="active"
+            WHERE %(active_rel_filter)s
+            WITH rl, active_edge, n
+            ORDER BY %(id_func)s(rl), %(id_func)s(n), active_edge.from DESC
+            WITH rl, n, head(collect(active_edge)) AS active_edge
+            WHERE active_edge.status = "active"
             CREATE (rl)-[deleted_edge:IS_RELATED $rel_prop]->(n)
             SET deleted_edge.hierarchy = active_edge.hierarchy
             WITH rl, active_edge, n
@@ -1053,7 +1126,11 @@ class RelationshipDeleteAllQuery(Query):
 
             WITH rl
             MATCH (rl)<-[active_edge:IS_RELATED]-(n)
-            WHERE %(active_rel_filter)s AND active_edge.status ="active"
+            WHERE %(active_rel_filter)s
+            WITH rl, active_edge, n
+            ORDER BY %(id_func)s(rl), %(id_func)s(n), active_edge.from DESC
+            WITH rl, n, head(collect(active_edge)) AS active_edge
+            WHERE active_edge.status = "active"
             CREATE (rl)<-[deleted_edge:IS_RELATED $rel_prop]-(n)
             SET deleted_edge.hierarchy = active_edge.hierarchy
             WITH rl, active_edge, n
@@ -1066,9 +1143,7 @@ class RelationshipDeleteAllQuery(Query):
             "inbound" as rel_direction
         }
         RETURN DISTINCT uuid, kind, rel_identifier, rel_direction
-        """ % {
-            "active_rel_filter": active_rel_filter,
-        }
+        """ % {"active_rel_filter": active_rel_filter, "id_func": db.get_id_function_name()}
 
         self.add_to_query(query)
 
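The two RelationshipDeleteAllQuery hunks above stop treating any edge with status "active" as current: they order the IS_RELATED edges for each (relationship node, peer node) pair by their from timestamp, keep only the newest one via head(collect(...)), and then require that this newest edge is active. A rough pure-Python equivalent of that selection, on hypothetical edge dictionaries:

    def latest_active_edges(edges: list[dict]) -> list[dict]:
        """Keep, for every (rl_id, node_id) pair, only the edge with the greatest
        `from` value, and return it only if that newest edge is still active."""
        latest: dict[tuple, dict] = {}
        for edge in edges:
            key = (edge["rl_id"], edge["node_id"])
            if key not in latest or edge["from"] > latest[key]["from"]:
                latest[key] = edge
        return [edge for edge in latest.values() if edge["status"] == "active"]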
infrahub/core/query/subquery.py CHANGED
@@ -57,7 +57,7 @@ async def build_subquery_filter(
     params.update(field_params)
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
@@ -138,7 +138,7 @@ async def build_subquery_order(
     field_filter[-1].name = "last"
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
infrahub/core/relationship/model.py CHANGED
@@ -416,7 +416,7 @@ class Relationship(FlagPropertyMixin, NodePropertyMixin):
         await update_relationships_to(rel_ids_to_update, to=delete_at, db=db)
 
         delete_query = await RelationshipDeleteQuery.init(
-            db=db, rel=self, source_id=node.id, destination_id=peer.id, branch=branch, at=delete_at
+            db=db, rel=self, source=node, destination=peer, branch=branch, at=delete_at
         )
         await delete_query.execute(db=db)
 
infrahub/core/schema/definitions/core/__init__.py CHANGED
@@ -56,7 +56,13 @@ from .propose_change_validator (
     core_user_validator,
 )
 from .repository import core_generic_repository, core_read_only_repository, core_repository
-from .resource_pool import core_ip_address_pool, core_ip_prefix_pool, core_number_pool, core_resource_pool
+from .resource_pool import (
+    core_ip_address_pool,
+    core_ip_prefix_pool,
+    core_number_pool,
+    core_resource_pool,
+    core_weighted_pool_resource,
+)
 from .template import core_object_component_template, core_object_template
 from .transform import core_transform, core_transform_jinja2, core_transform_python
 from .webhook import core_custom_webhook, core_standard_webhook, core_webhook
@@ -81,6 +87,7 @@ core_models_mixed: dict[str, list] = {
         builtin_ip_prefix,
         builtin_ip_address,
         core_resource_pool,
+        core_weighted_pool_resource,
         core_generic_account,
         core_base_permission,
         core_credential,
infrahub/core/schema/definitions/core/resource_pool.py CHANGED
@@ -32,6 +32,26 @@ core_resource_pool = GenericSchema(
     ],
 )
 
+core_weighted_pool_resource = GenericSchema(
+    name="WeightedPoolResource",
+    namespace="Core",
+    label="Weighted Pool Resource",
+    description="Resource to be used in a pool, its weight is used to determine its priority on allocation.",
+    include_in_menu=False,
+    branch=BranchSupportType.AWARE,
+    generate_profile=False,
+    attributes=[
+        Attr(
+            name="allocation_weight",
+            label="Weight",
+            description="Weight determines allocation priority, resources with higher values are selected first.",
+            kind="Number",
+            optional=True,
+            order_weight=10000,
+        )
+    ],
+)
+
 core_ip_prefix_pool = NodeSchema(
     name="IPPrefixPool",
     namespace="Core",
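The new CoreWeightedPoolResource generic adds an optional allocation_weight attribute that resource pools can use to prioritise allocation. A hypothetical node schema opting into it could look like the sketch below; the Infra namespace, node name, and prefix attribute are invented for illustration, while CoreWeightedPoolResource is the kind generated from the definition above:

    example_weighted_prefix = NodeSchema(
        name="ExamplePrefix",
        namespace="Infra",
        inherit_from=["CoreWeightedPoolResource"],  # inherits allocation_weight
        attributes=[
            Attr(name="prefix", kind="Text"),
        ],
    )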
infrahub/core/schema/schema_branch.py CHANGED
@@ -2050,6 +2050,9 @@ class SchemaBranch:
 
             identified.add(node_schema)
 
+            if node_schema.is_node_schema:
+                identified.update([self.get(name=kind, duplicate=False) for kind in node_schema.inherit_from])
+
             for relationship in node_schema.relationships:
                 if (
                     relationship.peer in [InfrahubKind.GENERICGROUP, InfrahubKind.PROFILE]
infrahub/core/validators/tasks.py CHANGED
@@ -71,7 +71,7 @@ async def schema_path_validate(
     schema_branch: SchemaBranch,
     service: InfrahubServices,
 ) -> SchemaValidatorPathResponseData:
-    async with service.database.start_session() as db:
+    async with service.database.start_session(read_only=True) as db:
         constraint_request = SchemaConstraintValidatorRequest(
             branch=branch,
             constraint_name=constraint_name,
infrahub/core/validators/uniqueness/query.py CHANGED
@@ -225,6 +225,13 @@ class NodeUniqueAttributeConstraintQuery(Query):
             attr_name,
             attr_value,
             relationship_identifier
+        ORDER BY
+            node_id,
+            deepest_branch_name,
+            node_count,
+            attr_name,
+            attr_value,
+            relationship_identifier
         """ % {
             "select_subqueries_str": select_subqueries_str,
             "branch_filter": branch_filter,
infrahub/database/__init__.py CHANGED
@@ -476,8 +476,6 @@ async def validate_database(
 
 
 async def get_db(retry: int = 0) -> AsyncDriver:
-    URI = f"{config.SETTINGS.database.protocol}://{config.SETTINGS.database.address}:{config.SETTINGS.database.port}"
-
     trusted_certificates = TrustSystemCAs()
     if config.SETTINGS.database.tls_insecure:
         trusted_certificates = TrustAll()
@@ -485,11 +483,14 @@ async def get_db(retry: int = 0) -> AsyncDriver:
         trusted_certificates = TrustCustomCAs(config.SETTINGS.database.tls_ca_file)
 
     driver = AsyncGraphDatabase.driver(
-        URI,
+        config.SETTINGS.database.database_uri,
         auth=(config.SETTINGS.database.username, config.SETTINGS.database.password),
         encrypted=config.SETTINGS.database.tls_enabled,
         trusted_certificates=trusted_certificates,
-        notifications_disabled_categories=[NotificationDisabledCategory.UNRECOGNIZED],
+        notifications_disabled_categories=[
+            NotificationDisabledCategory.UNRECOGNIZED,
+            NotificationDisabledCategory.DEPRECATION,  # TODO: Remove me with 1.3
+        ],
         notifications_min_severity=NotificationMinimumSeverity.WARNING,
     )
 
infrahub/graphql/app.py CHANGED
@@ -155,7 +155,7 @@ class InfrahubGraphQLApp:
 
         db = websocket.app.state.db
 
-        async with db.start_session() as db:
+        async with db.start_session(read_only=True) as db:
            branch_name = websocket.path_params.get("branch_name", registry.default_branch)
            branch = await registry.get_branch(db=db, branch=branch_name)
 
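This is the first of many hunks in this release that open read paths with start_session(read_only=True). A generic sketch of the pattern, not Infrahub's actual InfrahubDatabase implementation: a read-only session can be opened with READ access mode so the driver may route it to a reader member of the cluster:

    from contextlib import asynccontextmanager

    class ReadAwareDatabaseSketch:
        """Illustrative only: routes read-only sessions to READ access mode."""

        def __init__(self, driver) -> None:
            self._driver = driver

        @asynccontextmanager
        async def start_session(self, read_only: bool = False):
            session = self._driver.session(default_access_mode="READ" if read_only else "WRITE")
            try:
                yield session
            finally:
                await session.close()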
infrahub/graphql/loaders/node.py CHANGED
@@ -53,7 +53,7 @@ class NodeDataLoader(DataLoader[str, Node | None]):
         self.db = db
 
     async def batch_load_fn(self, keys: list[Any]) -> list[Node | None]:
-        async with self.db.start_session() as db:
+        async with self.db.start_session(read_only=True) as db:
             nodes_by_id = await NodeManager.get_many(
                 db=db,
                 ids=keys,
infrahub/graphql/loaders/peers.py CHANGED
@@ -51,7 +51,7 @@ class PeerRelationshipsDataLoader(DataLoader[str, list[Relationship]]):
         self.db = db
 
     async def batch_load_fn(self, keys: list[Any]) -> list[list[Relationship]]:  # pylint: disable=method-hidden
-        async with self.db.start_session() as db:
+        async with self.db.start_session(read_only=True) as db:
             peer_rels = await NodeManager.query_peers(
                 db=db,
                 ids=keys,
infrahub/graphql/mutations/proposed_change.py CHANGED
@@ -222,7 +222,7 @@ class ProposedChangeMerge(Mutation):
 
         async with graphql_context.db.start_session() as db:
             proposed_change.state.value = ProposedChangeState.MERGING.value
-            proposed_change.save(db=db)
+            await proposed_change.save(db=db)
 
         if wait_until_completion:
             await graphql_context.service.workflow.execute_workflow(
infrahub/graphql/queries/diff/tree.py CHANGED
@@ -10,6 +10,7 @@ from infrahub.core import registry
 from infrahub.core.constants import DiffAction, RelationshipCardinality
 from infrahub.core.constants.database import DatabaseEdgeType
 from infrahub.core.diff.model.path import NameTrackingId
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters
 from infrahub.core.diff.repository.repository import DiffRepository
 from infrahub.core.query.diff import DiffCountChanges
 from infrahub.core.timestamp import Timestamp
@@ -415,7 +416,7 @@ class DiffTreeResolver:
                 diff_branch_names=[diff_branch.name],
                 from_time=from_timestamp,
                 to_time=to_timestamp,
-                filters=filters_dict,
+                filters=EnrichedDiffQueryFilters(**filters_dict),
                 include_parents=include_parents,
                 limit=limit,
                 offset=offset,
infrahub/graphql/queries/relationship.py CHANGED
@@ -34,7 +34,7 @@ class Relationships(ObjectType):
 
         response: dict[str, Any] = {"edges": [], "count": None}
 
-        async with graphql_context.db.start_session() as db:
+        async with graphql_context.db.start_session(read_only=True) as db:
             query = await RelationshipGetByIdentifierQuery.init(
                 db=db,
                 branch=graphql_context.branch,
infrahub/graphql/queries/task.py CHANGED
@@ -32,6 +32,8 @@ class Tasks(ObjectType):
         workflow: list[str] | None = None,
         related_node__ids: list | None = None,
         q: str | None = None,
+        log_limit: int | None = None,
+        log_offset: int | None = None,
     ) -> dict[str, Any]:
         related_nodes = related_node__ids or []
         ids = ids or []
@@ -45,6 +47,8 @@ class Tasks(ObjectType):
             statuses=state,
             workflows=workflow,
             related_nodes=related_nodes,
+            log_limit=log_limit,
+            log_offset=log_offset,
         )
 
     @staticmethod
@@ -71,6 +75,8 @@ class Tasks(ObjectType):
         branch: str | None = None,
         limit: int | None = None,
         offset: int | None = None,
+        log_limit: int | None = None,
+        log_offset: int | None = None,
     ) -> dict[str, Any]:
         graphql_context: GraphqlContext = info.context
         fields = await extract_fields_first_node(info)
@@ -87,6 +93,8 @@ class Tasks(ObjectType):
             related_nodes=related_nodes,
             limit=limit,
             offset=offset,
+            log_limit=log_limit,
+            log_offset=log_offset,
         )
         prefect_count = prefect_tasks.get("count", None)
         return {
@@ -105,6 +113,8 @@ Task = Field(
     workflow=List(String),
     ids=List(String),
     q=String(required=False),
+    log_limit=Int(required=False),
+    log_offset=Int(required=False),
     resolver=Tasks.resolve,
     required=True,
 )
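The new log_limit and log_offset arguments let clients page through a task's logs independently of the task list itself. An illustrative query string, assuming the task query is exposed as InfrahubTask with snake_case arguments and that the node and log field names below exist; both may differ per release:

    TASKS_WITH_PAGED_LOGS = """
    query {
      InfrahubTask(limit: 10, log_limit: 5, log_offset: 0) {
        count
        edges {
          node {
            title
            state
            logs {
              count
              edges { node { message severity } }
            }
          }
        }
      }
    }
    """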
infrahub/graphql/resolvers/many_relationship.py CHANGED
@@ -33,7 +33,7 @@ class ManyRelationshipResolver:
         parent_id: str,
         node_schema: NodeSchema,
     ) -> list[str]:
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             query = await NodeGetHierarchyQuery.init(
                 db=dbs,
                 direction=RelationshipHierarchyDirection.DESCENDANTS,
@@ -55,7 +55,7 @@ class ManyRelationshipResolver:
         rel_schema: RelationshipSchema,
         filters: dict[str, Any],
     ) -> int:
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             return await NodeManager.count_peers(
                 db=dbs,
                 ids=ids,
@@ -194,7 +194,7 @@ class ManyRelationshipResolver:
         offset: int | None = None,
         limit: int | None = None,
     ) -> list[dict[str, Any]] | None:
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             objs = await NodeManager.query_peers(
                 db=dbs,
                 ids=ids,
@@ -257,7 +257,7 @@ class ManyRelationshipResolver:
         all_peer_rels.extend(node_peer_rels)
         if not all_peer_rels:
             return None
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             return [
                 await obj.to_graphql(db=dbs, fields=node_fields, related_node_ids=related_node_ids)
                 for obj in all_peer_rels
infrahub/graphql/resolvers/resolver.py CHANGED
@@ -29,7 +29,7 @@ async def account_resolver(
     fields = await extract_fields(info.field_nodes[0].selection_set)
     graphql_context: GraphqlContext = info.context
 
-    async with graphql_context.db.start_session() as db:
+    async with graphql_context.db.start_session(read_only=True) as db:
         results = await NodeManager.query(
             schema=InfrahubKind.GENERICACCOUNT,
             filters={"ids": [graphql_context.account_session.account_id]},
@@ -102,7 +102,7 @@ async def default_resolver(*args: Any, **kwargs) -> dict | list[dict] | None:
         if "__" in key and value or key in ["id", "ids"]
     }
 
-    async with graphql_context.db.start_session() as db:
+    async with graphql_context.db.start_session(read_only=True) as db:
         objs = await NodeManager.query_peers(
             db=db,
             ids=[parent["id"]],
@@ -158,7 +158,7 @@ async def default_paginated_list_resolver(
     fields = await extract_selection(info.field_nodes[0], schema=schema)
 
     graphql_context: GraphqlContext = info.context
-    async with graphql_context.db.start_session() as db:
+    async with graphql_context.db.start_session(read_only=True) as db:
         response: dict[str, Any] = {"edges": []}
         filters = {
             key: value for key, value in kwargs.items() if ("__" in key and value is not None) or key in ("ids", "hfid")
@@ -293,7 +293,7 @@ async def hierarchy_resolver(
 
     response: dict[str, Any] = {"edges": [], "count": None}
 
-    async with graphql_context.db.start_session() as db:
+    async with graphql_context.db.start_session(read_only=True) as db:
         if "count" in fields:
             response["count"] = await NodeManager.count_hierarchy(
                 db=db,
infrahub/graphql/resolvers/single_relationship.py CHANGED
@@ -109,7 +109,7 @@ class SingleRelationshipResolver:
             for key, value in kwargs.items()
             if "__" in key and value or key in ["id", "ids"]
         }
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             objs = await NodeManager.query_peers(
                 db=dbs,
                 ids=[parent_id],
@@ -171,5 +171,5 @@ class SingleRelationshipResolver:
         node = await loader.load(key=peer_id)
         if not node:
             return None
-        async with db.start_session() as dbs:
+        async with db.start_session(read_only=True) as dbs:
             return await node.to_graphql(db=dbs, fields=node_fields, related_node_ids=related_node_ids)
infrahub/graphql/subscription/graphql_query.py CHANGED
@@ -29,7 +29,7 @@ async def resolver_graphql_query(
     graphql_context: GraphqlContext = info.context
     at = Timestamp()
 
-    async with graphql_context.db.start_session() as db:
+    async with graphql_context.db.start_session(read_only=True) as db:
         # Find the GraphQLQuery and the GraphQL Schema
         graphql_query = await NodeManager.get_one_by_default_filter(
             db=db, id=name, kind=CoreGraphQLQuery, branch=graphql_context.branch, at=at
@@ -38,7 +38,7 @@ async def resolver_graphql_query(
            raise ValueError(f"Unable to find the {InfrahubKind.GRAPHQLQUERY} {name}")
 
     while True:
-        async with graphql_context.db.start_session() as db:
+        async with graphql_context.db.start_session(read_only=True) as db:
             result = await graphql(
                 schema=graphql_schema,
                 source=graphql_query.query.value,
infrahub/graphql/types/branch.py CHANGED
@@ -37,7 +37,7 @@ class BranchType(InfrahubObjectType):
         graphql_context: GraphqlContext,
         **kwargs: Any,
     ) -> list[dict[str, Any]]:
-        async with graphql_context.db.start_session() as db:
+        async with graphql_context.db.start_session(read_only=True) as db:
             objs = await Branch.get_list(db=db, **kwargs)
 
             if not objs:
infrahub/graphql/types/task_log.py CHANGED
@@ -1,4 +1,4 @@
-from graphene import Field, InputObjectType, List, ObjectType, String
+from graphene import Field, InputObjectType, Int, List, NonNull, ObjectType, String
 from graphene.types.uuid import UUID
 
 from .enums import Severity
@@ -26,4 +26,5 @@ class TaskLogNodes(ObjectType):
 
 
 class TaskLogEdge(ObjectType):
-    edges = List(TaskLogNodes)
+    edges = List(NonNull(TaskLogNodes), required=True)
+    count = Int(required=True)
infrahub/message_bus/operations/refresh/registry.py CHANGED
@@ -11,7 +11,7 @@ async def branches(message: messages.RefreshRegistryBranches, service: InfrahubS
         service.log.info("Ignoring refresh registry refresh request originating from self", worker=WORKER_IDENTITY)
         return
 
-    async with service.database.start_session() as db:
+    async with service.database.start_session(read_only=True) as db:
         await refresh_branches(db=db)
 
     await service.component.refresh_schema_hash()