infrahub-server 1.4.10__py3-none-any.whl → 1.5.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178):
  1. infrahub/actions/tasks.py +208 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/diff/diff.py +1 -1
  4. infrahub/api/query.py +2 -0
  5. infrahub/api/schema.py +3 -0
  6. infrahub/auth.py +5 -5
  7. infrahub/cli/db.py +26 -2
  8. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  9. infrahub/config.py +7 -2
  10. infrahub/core/attribute.py +25 -22
  11. infrahub/core/branch/models.py +2 -2
  12. infrahub/core/branch/needs_rebase_status.py +11 -0
  13. infrahub/core/branch/tasks.py +4 -3
  14. infrahub/core/changelog/models.py +4 -12
  15. infrahub/core/constants/__init__.py +1 -0
  16. infrahub/core/constants/infrahubkind.py +1 -0
  17. infrahub/core/convert_object_type/object_conversion.py +201 -0
  18. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  19. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  20. infrahub/core/diff/model/path.py +4 -0
  21. infrahub/core/diff/payload_builder.py +1 -1
  22. infrahub/core/diff/query/artifact.py +1 -1
  23. infrahub/core/graph/__init__.py +1 -1
  24. infrahub/core/initialization.py +2 -2
  25. infrahub/core/ipam/utilization.py +1 -1
  26. infrahub/core/manager.py +9 -84
  27. infrahub/core/migrations/graph/__init__.py +6 -0
  28. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
  29. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
  30. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
  31. infrahub/core/migrations/schema/node_attribute_add.py +5 -2
  32. infrahub/core/migrations/shared.py +5 -6
  33. infrahub/core/node/__init__.py +165 -42
  34. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  35. infrahub/core/node/create.py +67 -35
  36. infrahub/core/node/lock_utils.py +98 -0
  37. infrahub/core/node/node_property_attribute.py +230 -0
  38. infrahub/core/node/standard.py +1 -1
  39. infrahub/core/property.py +11 -0
  40. infrahub/core/protocols.py +8 -1
  41. infrahub/core/query/attribute.py +27 -15
  42. infrahub/core/query/node.py +61 -185
  43. infrahub/core/query/relationship.py +43 -26
  44. infrahub/core/query/subquery.py +0 -8
  45. infrahub/core/registry.py +2 -2
  46. infrahub/core/relationship/constraints/count.py +1 -1
  47. infrahub/core/relationship/model.py +60 -20
  48. infrahub/core/schema/attribute_schema.py +0 -2
  49. infrahub/core/schema/basenode_schema.py +42 -2
  50. infrahub/core/schema/definitions/core/__init__.py +2 -0
  51. infrahub/core/schema/definitions/core/generator.py +2 -0
  52. infrahub/core/schema/definitions/core/group.py +16 -2
  53. infrahub/core/schema/definitions/core/repository.py +7 -0
  54. infrahub/core/schema/definitions/internal.py +14 -1
  55. infrahub/core/schema/generated/base_node_schema.py +6 -1
  56. infrahub/core/schema/node_schema.py +5 -2
  57. infrahub/core/schema/relationship_schema.py +0 -1
  58. infrahub/core/schema/schema_branch.py +137 -2
  59. infrahub/core/schema/schema_branch_display.py +123 -0
  60. infrahub/core/schema/schema_branch_hfid.py +114 -0
  61. infrahub/core/validators/aggregated_checker.py +1 -1
  62. infrahub/core/validators/determiner.py +12 -1
  63. infrahub/core/validators/relationship/peer.py +1 -1
  64. infrahub/core/validators/tasks.py +1 -1
  65. infrahub/display_labels/__init__.py +0 -0
  66. infrahub/display_labels/gather.py +48 -0
  67. infrahub/display_labels/models.py +240 -0
  68. infrahub/display_labels/tasks.py +186 -0
  69. infrahub/display_labels/triggers.py +22 -0
  70. infrahub/events/group_action.py +1 -1
  71. infrahub/events/node_action.py +1 -1
  72. infrahub/generators/constants.py +7 -0
  73. infrahub/generators/models.py +38 -12
  74. infrahub/generators/tasks.py +34 -16
  75. infrahub/git/base.py +38 -1
  76. infrahub/git/integrator.py +22 -14
  77. infrahub/graphql/analyzer.py +1 -1
  78. infrahub/graphql/api/dependencies.py +2 -4
  79. infrahub/graphql/api/endpoints.py +2 -2
  80. infrahub/graphql/app.py +2 -4
  81. infrahub/graphql/initialization.py +2 -3
  82. infrahub/graphql/manager.py +212 -137
  83. infrahub/graphql/middleware.py +12 -0
  84. infrahub/graphql/mutations/branch.py +11 -0
  85. infrahub/graphql/mutations/computed_attribute.py +110 -3
  86. infrahub/graphql/mutations/convert_object_type.py +34 -13
  87. infrahub/graphql/mutations/display_label.py +111 -0
  88. infrahub/graphql/mutations/generator.py +25 -7
  89. infrahub/graphql/mutations/hfid.py +118 -0
  90. infrahub/graphql/mutations/ipam.py +21 -8
  91. infrahub/graphql/mutations/main.py +37 -153
  92. infrahub/graphql/mutations/profile.py +195 -0
  93. infrahub/graphql/mutations/proposed_change.py +2 -1
  94. infrahub/graphql/mutations/relationship.py +2 -2
  95. infrahub/graphql/mutations/repository.py +22 -83
  96. infrahub/graphql/mutations/resource_manager.py +2 -2
  97. infrahub/graphql/mutations/schema.py +5 -5
  98. infrahub/graphql/mutations/webhook.py +1 -1
  99. infrahub/graphql/queries/resource_manager.py +1 -1
  100. infrahub/graphql/registry.py +173 -0
  101. infrahub/graphql/resolvers/resolver.py +2 -0
  102. infrahub/graphql/schema.py +8 -1
  103. infrahub/groups/tasks.py +1 -1
  104. infrahub/hfid/__init__.py +0 -0
  105. infrahub/hfid/gather.py +48 -0
  106. infrahub/hfid/models.py +240 -0
  107. infrahub/hfid/tasks.py +185 -0
  108. infrahub/hfid/triggers.py +22 -0
  109. infrahub/lock.py +67 -30
  110. infrahub/locks/__init__.py +0 -0
  111. infrahub/locks/tasks.py +37 -0
  112. infrahub/middleware.py +26 -1
  113. infrahub/patch/plan_writer.py +2 -2
  114. infrahub/profiles/__init__.py +0 -0
  115. infrahub/profiles/node_applier.py +101 -0
  116. infrahub/profiles/queries/__init__.py +0 -0
  117. infrahub/profiles/queries/get_profile_data.py +99 -0
  118. infrahub/profiles/tasks.py +63 -0
  119. infrahub/proposed_change/tasks.py +10 -1
  120. infrahub/repositories/__init__.py +0 -0
  121. infrahub/repositories/create_repository.py +113 -0
  122. infrahub/server.py +16 -3
  123. infrahub/services/__init__.py +8 -5
  124. infrahub/tasks/registry.py +6 -4
  125. infrahub/trigger/catalogue.py +4 -0
  126. infrahub/trigger/models.py +2 -0
  127. infrahub/trigger/tasks.py +3 -0
  128. infrahub/webhook/models.py +1 -1
  129. infrahub/workflows/catalogue.py +110 -3
  130. infrahub/workflows/initialization.py +16 -0
  131. infrahub/workflows/models.py +17 -2
  132. infrahub_sdk/branch.py +5 -8
  133. infrahub_sdk/checks.py +1 -1
  134. infrahub_sdk/client.py +364 -84
  135. infrahub_sdk/convert_object_type.py +61 -0
  136. infrahub_sdk/ctl/check.py +2 -3
  137. infrahub_sdk/ctl/cli_commands.py +18 -12
  138. infrahub_sdk/ctl/config.py +8 -2
  139. infrahub_sdk/ctl/generator.py +6 -3
  140. infrahub_sdk/ctl/graphql.py +184 -0
  141. infrahub_sdk/ctl/repository.py +39 -1
  142. infrahub_sdk/ctl/schema.py +18 -3
  143. infrahub_sdk/ctl/utils.py +4 -0
  144. infrahub_sdk/ctl/validate.py +5 -3
  145. infrahub_sdk/diff.py +4 -5
  146. infrahub_sdk/exceptions.py +2 -0
  147. infrahub_sdk/generator.py +7 -1
  148. infrahub_sdk/graphql/__init__.py +12 -0
  149. infrahub_sdk/graphql/constants.py +1 -0
  150. infrahub_sdk/graphql/plugin.py +85 -0
  151. infrahub_sdk/graphql/query.py +77 -0
  152. infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
  153. infrahub_sdk/graphql/utils.py +40 -0
  154. infrahub_sdk/node/attribute.py +2 -0
  155. infrahub_sdk/node/node.py +28 -20
  156. infrahub_sdk/playback.py +1 -2
  157. infrahub_sdk/protocols.py +54 -6
  158. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  159. infrahub_sdk/pytest_plugin/utils.py +40 -0
  160. infrahub_sdk/repository.py +1 -2
  161. infrahub_sdk/schema/__init__.py +38 -0
  162. infrahub_sdk/schema/main.py +1 -0
  163. infrahub_sdk/schema/repository.py +8 -0
  164. infrahub_sdk/spec/object.py +120 -7
  165. infrahub_sdk/spec/range_expansion.py +118 -0
  166. infrahub_sdk/timestamp.py +18 -6
  167. infrahub_sdk/transforms.py +1 -1
  168. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +9 -11
  169. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +177 -134
  170. infrahub_testcontainers/container.py +1 -1
  171. infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
  172. infrahub_testcontainers/docker-compose.test.yml +1 -1
  173. infrahub_testcontainers/models.py +2 -2
  174. infrahub_testcontainers/performance_test.py +4 -4
  175. infrahub/core/convert_object_type/conversion.py +0 -134
  176. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
  177. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
  178. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub/actions/tasks.py CHANGED
@@ -1,17 +1,114 @@
1
1
  from __future__ import annotations
2
2
 
3
- from infrahub_sdk.graphql import Mutation
3
+ from collections import defaultdict
4
+ from typing import TYPE_CHECKING, Any
5
+
6
+ from infrahub_sdk.graphql import Mutation, Query
7
+ from infrahub_sdk.types import Order
4
8
  from prefect import flow
5
9
 
6
10
  from infrahub.context import InfrahubContext # noqa: TC001 needed for prefect flow
11
+ from infrahub.core.constants import InfrahubKind
12
+ from infrahub.generators.models import (
13
+ GeneratorDefinitionModel,
14
+ RequestGeneratorRun,
15
+ )
7
16
  from infrahub.services import InfrahubServices # noqa: TC001 needed for prefect flow
8
17
  from infrahub.trigger.models import TriggerType
9
18
  from infrahub.trigger.setup import setup_triggers_specific
19
+ from infrahub.workers.dependencies import get_client, get_workflow
20
+ from infrahub.workflows.catalogue import REQUEST_GENERATOR_RUN
10
21
  from infrahub.workflows.utils import add_tags
11
22
 
12
23
  from .gather import gather_trigger_action_rules
13
24
  from .models import EventGroupMember # noqa: TC001 needed for prefect flow
14
25
 
26
+ if TYPE_CHECKING:
27
+ from infrahub_sdk.client import InfrahubClient
28
+ from infrahub_sdk.node import InfrahubNode
29
+
30
+
31
+ def get_generator_run_query(definition_id: str, target_ids: list[str]) -> Query:
32
+ return Query(
33
+ name=InfrahubKind.GENERATORDEFINITION,
34
+ query={
35
+ InfrahubKind.GENERATORDEFINITION: {
36
+ "@filters": {
37
+ "ids": [definition_id],
38
+ },
39
+ "edges": {
40
+ "node": {
41
+ "id": None,
42
+ "name": {
43
+ "value": None,
44
+ },
45
+ "class_name": {
46
+ "value": None,
47
+ },
48
+ "file_path": {
49
+ "value": None,
50
+ },
51
+ "query": {
52
+ "node": {
53
+ "name": {
54
+ "value": None,
55
+ },
56
+ },
57
+ },
58
+ "convert_query_response": {
59
+ "value": None,
60
+ },
61
+ "parameters": {
62
+ "value": None,
63
+ },
64
+ "execute_in_proposed_change": {
65
+ "value": None,
66
+ },
67
+ "execute_after_merge": {
68
+ "value": None,
69
+ },
70
+ "targets": {
71
+ "node": {
72
+ "id": None,
73
+ "members": {
74
+ "@filters": {
75
+ "ids": target_ids,
76
+ },
77
+ "edges": {
78
+ "node": {
79
+ "__typename": None,
80
+ "id": None,
81
+ "display_label": None,
82
+ },
83
+ },
84
+ },
85
+ },
86
+ },
87
+ "repository": {
88
+ "node": {
89
+ "__typename": None,
90
+ "id": None,
91
+ "name": {
92
+ "value": None,
93
+ },
94
+ f"... on {InfrahubKind.REPOSITORY}": {
95
+ "commit": {
96
+ "value": None,
97
+ },
98
+ },
99
+ f"... on {InfrahubKind.READONLYREPOSITORY}": {
100
+ "commit": {
101
+ "value": None,
102
+ },
103
+ },
104
+ },
105
+ },
106
+ },
107
+ },
108
+ },
109
+ },
110
+ )
111
+
15
112
 
16
113
  @flow(
17
114
  name="action-add-node-to-group",
@@ -65,12 +162,19 @@ async def run_generator(
65
162
  branch_name: str,
66
163
  node_ids: list[str],
67
164
  generator_definition_id: str,
68
- context: InfrahubContext, # noqa: ARG001
69
- service: InfrahubServices,
165
+ context: InfrahubContext,
166
+ service: InfrahubServices, # noqa: ARG001
70
167
  ) -> None:
71
168
  await add_tags(branches=[branch_name], nodes=node_ids + [generator_definition_id])
72
- await _run_generator(
73
- branch_name=branch_name, generator_definition_id=generator_definition_id, node_ids=node_ids, service=service
169
+
170
+ client = get_client()
171
+
172
+ await _run_generators(
173
+ branch_name=branch_name,
174
+ node_ids=node_ids,
175
+ generator_definition_id=generator_definition_id,
176
+ client=client,
177
+ context=context,
74
178
  )
75
179
 
76
180
 
@@ -82,13 +186,20 @@ async def run_generator_group_event(
82
186
  branch_name: str,
83
187
  members: list[EventGroupMember],
84
188
  generator_definition_id: str,
85
- context: InfrahubContext, # noqa: ARG001
86
- service: InfrahubServices,
189
+ context: InfrahubContext,
190
+ service: InfrahubServices, # noqa: ARG001
87
191
  ) -> None:
88
192
  node_ids = [node.id for node in members]
89
193
  await add_tags(branches=[branch_name], nodes=node_ids + [generator_definition_id])
90
- await _run_generator(
91
- branch_name=branch_name, generator_definition_id=generator_definition_id, node_ids=node_ids, service=service
194
+
195
+ client = get_client()
196
+
197
+ await _run_generators(
198
+ branch_name=branch_name,
199
+ node_ids=node_ids,
200
+ generator_definition_id=generator_definition_id,
201
+ client=client,
202
+ context=context,
92
203
  )
93
204
 
94
205
 
@@ -104,16 +215,97 @@ async def configure_action_rules(
104
215
  ) # type: ignore[misc]
105
216
 
106
217
 
107
- async def _run_generator(
218
+ async def _get_targets(
219
+ branch_name: str,
220
+ targets: list[dict[str, Any]],
221
+ client: InfrahubClient,
222
+ ) -> dict[str, dict[str, InfrahubNode]]:
223
+ """Get the targets per kind in order to extract the variables."""
224
+
225
+ targets_per_kind: dict[str, dict[str, InfrahubNode]] = defaultdict(dict)
226
+
227
+ for target in targets:
228
+ targets_per_kind[target["node"]["__typename"]][target["node"]["id"]] = None
229
+
230
+ for kind, values in targets_per_kind.items():
231
+ nodes = await client.filters(
232
+ kind=kind, branch=branch_name, ids=list(values.keys()), populate_store=False, order=Order(disable=True)
233
+ )
234
+ for node in nodes:
235
+ targets_per_kind[kind][node.id] = node
236
+
237
+ return targets_per_kind
238
+
239
+
240
+ async def _run_generators(
108
241
  branch_name: str,
109
242
  node_ids: list[str],
110
243
  generator_definition_id: str,
111
- service: InfrahubServices,
244
+ client: InfrahubClient,
245
+ context: InfrahubContext | None = None,
112
246
  ) -> None:
113
- mutation = Mutation(
114
- mutation="CoreGeneratorDefinitionRun",
115
- input_data={"data": {"id": generator_definition_id, "nodes": node_ids}},
116
- query={"ok": None},
247
+ """Fetch generator metadata and submit per-target runs.
248
+
249
+ Args:
250
+ branch_name: Branch on which to execute.
251
+ node_ids: Node IDs to run against (restricts selection if provided).
252
+ generator_definition_id: Generator definition to execute.
253
+ client: InfrahubClient to query additional data.
254
+ context: Execution context passed to downstream workflow submissions.
255
+
256
+ Returns:
257
+ None
258
+
259
+ Raises:
260
+ ValueError: If the generator definition is not found or none of the requested
261
+ targets are members of the target group.
262
+ """
263
+ response = await client.execute_graphql(
264
+ query=get_generator_run_query(definition_id=generator_definition_id, target_ids=node_ids).render(),
265
+ branch_name=branch_name,
117
266
  )
267
+ if not response[InfrahubKind.GENERATORDEFINITION]["edges"]:
268
+ raise ValueError(f"Generator definition {generator_definition_id} not found")
118
269
 
119
- await service.client.execute_graphql(query=mutation.render(), branch_name=branch_name)
270
+ data = response[InfrahubKind.GENERATORDEFINITION]["edges"][0]["node"]
271
+
272
+ if not data["targets"]["node"]["members"]["edges"]:
273
+ raise ValueError(f"Target {node_ids[0]} is not part of the group {data['targets']['node']['id']}")
274
+
275
+ targets = data["targets"]["node"]["members"]["edges"]
276
+
277
+ targets_per_kind = await _get_targets(branch_name=branch_name, targets=targets, client=client)
278
+
279
+ workflow = get_workflow()
280
+
281
+ for target in targets:
282
+ node: InfrahubNode | None = None
283
+ if data["parameters"]["value"]:
284
+ node = targets_per_kind[target["node"]["__typename"]][target["node"]["id"]]
285
+
286
+ request_generator_run_model = RequestGeneratorRun(
287
+ generator_definition=GeneratorDefinitionModel(
288
+ definition_id=generator_definition_id,
289
+ definition_name=data["name"]["value"],
290
+ class_name=data["class_name"]["value"],
291
+ file_path=data["file_path"]["value"],
292
+ query_name=data["query"]["node"]["name"]["value"],
293
+ convert_query_response=data["convert_query_response"]["value"],
294
+ group_id=data["targets"]["node"]["id"],
295
+ parameters=data["parameters"]["value"],
296
+ execute_in_proposed_change=data["execute_in_proposed_change"]["value"],
297
+ execute_after_merge=data["execute_after_merge"]["value"],
298
+ ),
299
+ commit=data["repository"]["node"]["commit"]["value"],
300
+ repository_id=data["repository"]["node"]["id"],
301
+ repository_name=data["repository"]["node"]["name"]["value"],
302
+ repository_kind=data["repository"]["node"]["__typename"],
303
+ branch_name=branch_name,
304
+ query=data["query"]["node"]["name"]["value"],
305
+ variables=await node.extract(params=data["parameters"]["value"]) if node else {},
306
+ target_id=target["node"]["id"],
307
+ target_name=target["node"]["display_label"],
308
+ )
309
+ await workflow.submit_workflow(
310
+ workflow=REQUEST_GENERATOR_RUN, context=context, parameters={"model": request_generator_run_model}
311
+ )
infrahub/api/artifact.py CHANGED
@@ -15,6 +15,7 @@ from infrahub.api.dependencies import (
15
15
  )
16
16
  from infrahub.core import registry
17
17
  from infrahub.core.account import ObjectPermission
18
+ from infrahub.core.branch.needs_rebase_status import check_need_rebase_status
18
19
  from infrahub.core.constants import GLOBAL_BRANCH_NAME, InfrahubKind, PermissionAction
19
20
  from infrahub.core.protocols import CoreArtifactDefinition
20
21
  from infrahub.database import InfrahubDatabase # noqa: TC001
@@ -74,6 +75,8 @@ async def generate_artifact(
74
75
  permission_manager: PermissionManager = Depends(get_permission_manager),
75
76
  context: InfrahubContext = Depends(get_context),
76
77
  ) -> None:
78
+ check_need_rebase_status(branch_params.branch)
79
+
77
80
  permission_decision = (
78
81
  PermissionDecisionFlag.ALLOW_DEFAULT
79
82
  if branch_params.branch.name in (GLOBAL_BRANCH_NAME, registry.default_branch)
infrahub/api/diff/diff.py CHANGED
@@ -52,7 +52,7 @@ async def get_diff_files(
52
52
  for branch_name, items in diff_files.items():
53
53
  for item in items:
54
54
  repository_id = item.repository.get_id()
55
- display_label = await item.repository.render_display_label(db=db)
55
+ display_label = await item.repository.get_display_label(db=db)
56
56
  if repository_id not in response[branch_name]:
57
57
  response[branch_name][repository_id] = BranchDiffRepository(
58
58
  id=repository_id,
infrahub/api/query.py CHANGED
@@ -24,6 +24,7 @@ from infrahub.graphql.metrics import (
24
24
  GRAPHQL_RESPONSE_SIZE_METRICS,
25
25
  GRAPHQL_TOP_LEVEL_QUERIES_METRICS,
26
26
  )
27
+ from infrahub.graphql.middleware import raise_on_mutation_on_branch_needing_rebase
27
28
  from infrahub.graphql.utils import extract_data
28
29
  from infrahub.groups.models import RequestGraphQLQueryGroupUpdate
29
30
  from infrahub.log import get_logger
@@ -98,6 +99,7 @@ async def execute_query(
98
99
  context_value=gql_params.context,
99
100
  root_value=None,
100
101
  variable_values=params,
102
+ middleware=[raise_on_mutation_on_branch_needing_rebase],
101
103
  )
102
104
 
103
105
  data = extract_data(query_name=gql_query.name.value, result=result)
infrahub/api/schema.py CHANGED
@@ -18,6 +18,7 @@ from infrahub.api.exceptions import SchemaNotValidError
18
18
  from infrahub.core import registry
19
19
  from infrahub.core.account import GlobalPermission
20
20
  from infrahub.core.branch import Branch # noqa: TC001
21
+ from infrahub.core.branch.needs_rebase_status import check_need_rebase_status
21
22
  from infrahub.core.constants import GLOBAL_BRANCH_NAME, GlobalPermissions, PermissionDecision
22
23
  from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
23
24
  from infrahub.core.models import ( # noqa: TC001
@@ -287,6 +288,8 @@ async def load_schema(
287
288
  permission_manager: PermissionManager = Depends(get_permission_manager),
288
289
  context: InfrahubContext = Depends(get_context),
289
290
  ) -> SchemaUpdate:
291
+ check_need_rebase_status(branch)
292
+
290
293
  permission_manager.raise_for_permission(
291
294
  permission=define_global_permission_from_branch(
292
295
  permission=GlobalPermissions.MANAGE_SCHEMA, branch_name=branch.name
infrahub/auth.py CHANGED
@@ -1,7 +1,7 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import uuid
4
- from datetime import datetime, timedelta, timezone
4
+ from datetime import UTC, datetime, timedelta
5
5
  from enum import Enum
6
6
  from typing import TYPE_CHECKING
7
7
 
@@ -78,7 +78,7 @@ async def authenticate_with_password(
78
78
  if not valid_credentials:
79
79
  raise AuthorizationError("Incorrect password")
80
80
 
81
- now = datetime.now(tz=timezone.utc)
81
+ now = datetime.now(tz=UTC)
82
82
  refresh_expires = now + timedelta(seconds=config.SETTINGS.security.refresh_token_lifetime)
83
83
 
84
84
  session_id = await create_db_refresh_token(db=db, account_id=account.id, expiration=refresh_expires)
@@ -139,7 +139,7 @@ async def signin_sso_account(db: InfrahubDatabase, account_name: str, sso_groups
139
139
  await group.members.add(db=db, data=account)
140
140
  await group.members.save(db=db)
141
141
 
142
- now = datetime.now(tz=timezone.utc)
142
+ now = datetime.now(tz=UTC)
143
143
  refresh_expires = now + timedelta(seconds=config.SETTINGS.security.refresh_token_lifetime)
144
144
  session_id = await create_db_refresh_token(db=db, account_id=account.id, expiration=refresh_expires)
145
145
  access_token = generate_access_token(account_id=account.id, session_id=session_id)
@@ -148,7 +148,7 @@ async def signin_sso_account(db: InfrahubDatabase, account_name: str, sso_groups
148
148
 
149
149
 
150
150
  def generate_access_token(account_id: str, session_id: uuid.UUID) -> str:
151
- now = datetime.now(tz=timezone.utc)
151
+ now = datetime.now(tz=UTC)
152
152
 
153
153
  access_expires = now + timedelta(seconds=config.SETTINGS.security.access_token_lifetime)
154
154
  access_data = {
@@ -165,7 +165,7 @@ def generate_access_token(account_id: str, session_id: uuid.UUID) -> str:
165
165
 
166
166
 
167
167
  def generate_refresh_token(account_id: str, session_id: uuid.UUID, expiration: datetime) -> str:
168
- now = datetime.now(tz=timezone.utc)
168
+ now = datetime.now(tz=UTC)
169
169
 
170
170
  refresh_data = {
171
171
  "sub": account_id,
infrahub/cli/db.py CHANGED
@@ -5,7 +5,7 @@ import logging
5
5
  import os
6
6
  from collections import defaultdict
7
7
  from csv import DictReader, DictWriter
8
- from datetime import datetime, timezone
8
+ from datetime import UTC, datetime
9
9
  from enum import Enum
10
10
  from pathlib import Path
11
11
  from typing import TYPE_CHECKING, Any, Sequence
@@ -54,12 +54,13 @@ from infrahub.log import get_logger
54
54
 
55
55
  from .constants import ERROR_BADGE, FAILED_BADGE, SUCCESS_BADGE
56
56
  from .db_commands.check_inheritance import check_inheritance
57
+ from .db_commands.clean_duplicate_schema_fields import clean_duplicate_schema_fields
57
58
  from .patch import patch_app
58
59
 
59
60
 
60
61
  def get_timestamp_string() -> str:
61
62
  """Generate a timestamp string in the format YYYYMMDD-HHMMSS."""
62
- return datetime.now(tz=timezone.utc).strftime("%Y%m%d-%H%M%S")
63
+ return datetime.now(tz=UTC).strftime("%Y%m%d-%H%M%S")
63
64
 
64
65
 
65
66
  if TYPE_CHECKING:
@@ -200,6 +201,29 @@ async def check_inheritance_cmd(
200
201
  await dbdriver.close()
201
202
 
202
203
 
204
+ @app.command(name="check-duplicate-schema-fields")
205
+ async def check_duplicate_schema_fields_cmd(
206
+ ctx: typer.Context,
207
+ fix: bool = typer.Option(False, help="Fix the duplicate schema fields on the default branch."),
208
+ config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
209
+ ) -> None:
210
+ """Check for any duplicate schema attributes or relationships on the default branch"""
211
+ logging.getLogger("infrahub").setLevel(logging.WARNING)
212
+ logging.getLogger("neo4j").setLevel(logging.ERROR)
213
+ logging.getLogger("prefect").setLevel(logging.ERROR)
214
+
215
+ config.load_and_exit(config_file_name=config_file)
216
+
217
+ context: CliContext = ctx.obj
218
+ dbdriver = await context.init_db(retry=1)
219
+
220
+ success = await clean_duplicate_schema_fields(db=dbdriver, fix=fix)
221
+ if not success:
222
+ raise typer.Exit(code=1)
223
+
224
+ await dbdriver.close()
225
+
226
+
203
227
  @app.command(name="update-core-schema")
204
228
  async def update_core_schema_cmd(
205
229
  ctx: typer.Context,
@@ -0,0 +1,212 @@
1
+ from dataclasses import dataclass
2
+ from enum import Enum
3
+ from typing import Any
4
+
5
+ from rich import print as rprint
6
+ from rich.console import Console
7
+ from rich.table import Table
8
+
9
+ from infrahub.cli.constants import FAILED_BADGE, SUCCESS_BADGE
10
+ from infrahub.core.query import Query, QueryType
11
+ from infrahub.database import InfrahubDatabase
12
+
13
+
14
+ class SchemaFieldType(str, Enum):
15
+ ATTRIBUTE = "attribute"
16
+ RELATIONSHIP = "relationship"
17
+
18
+
19
+ @dataclass
20
+ class SchemaFieldDetails:
21
+ schema_kind: str
22
+ schema_uuid: str
23
+ field_type: SchemaFieldType
24
+ field_name: str
25
+
26
+
27
+ class DuplicateSchemaFields(Query):
28
+ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: # noqa: ARG002
29
+ query = """
30
+ MATCH (root:Root)
31
+ LIMIT 1
32
+ WITH root.default_branch AS default_branch
33
+ MATCH (field:SchemaAttribute|SchemaRelationship)
34
+ CALL (default_branch, field) {
35
+ MATCH (field)-[is_part_of:IS_PART_OF]->(:Root)
36
+ WHERE is_part_of.branch = default_branch
37
+ ORDER BY is_part_of.from DESC
38
+ RETURN is_part_of
39
+ LIMIT 1
40
+ }
41
+ WITH default_branch, field, CASE
42
+ WHEN is_part_of.status = "active" AND is_part_of.to IS NULL THEN is_part_of.from
43
+ ELSE NULL
44
+ END AS active_from
45
+ WHERE active_from IS NOT NULL
46
+ WITH default_branch, field, active_from, "SchemaAttribute" IN labels(field) AS is_attribute
47
+ CALL (field, default_branch) {
48
+ MATCH (field)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
49
+ WHERE r1.branch = default_branch AND r2.branch = default_branch
50
+ AND r1.status = "active" AND r2.status = "active"
51
+ AND r1.to IS NULL AND r2.to IS NULL
52
+ ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
53
+ LIMIT 1
54
+ RETURN name_value.value AS field_name
55
+ }
56
+ CALL (field, default_branch) {
57
+ MATCH (field)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(peer:SchemaNode|SchemaGeneric)
58
+ WHERE rel.name IN ["schema__node__relationships", "schema__node__attributes"]
59
+ AND r1.branch = default_branch AND r2.branch = default_branch
60
+ AND r1.status = "active" AND r2.status = "active"
61
+ AND r1.to IS NULL AND r2.to IS NULL
62
+ ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
63
+ LIMIT 1
64
+ RETURN peer AS schema_vertex
65
+ }
66
+ WITH default_branch, field, field_name, is_attribute, active_from, schema_vertex
67
+ ORDER BY active_from DESC
68
+ WITH default_branch, field_name, is_attribute, schema_vertex, collect(field) AS fields_reverse_chron
69
+ WHERE size(fields_reverse_chron) > 1
70
+ """
71
+ self.add_to_query(query)
72
+
73
+
74
+ class GetDuplicateSchemaFields(DuplicateSchemaFields):
75
+ """
76
+ Get the kind, field type, and field name for any duplicated attributes or relationships on a given schema
77
+ on the default branch
78
+ """
79
+
80
+ name = "get_duplicate_schema_fields"
81
+ type = QueryType.READ
82
+ insert_return = False
83
+
84
+ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
85
+ await super().query_init(db=db, **kwargs)
86
+ query = """
87
+ CALL (schema_vertex, default_branch) {
88
+ MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "namespace"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
89
+ WHERE r1.branch = default_branch AND r2.branch = default_branch
90
+ ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
91
+ LIMIT 1
92
+ RETURN name_value.value AS schema_namespace
93
+ }
94
+ CALL (schema_vertex, default_branch) {
95
+ MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
96
+ WHERE r1.branch = default_branch AND r2.branch = default_branch
97
+ ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
98
+ LIMIT 1
99
+ RETURN name_value.value AS schema_name
100
+ }
101
+ RETURN schema_namespace + schema_name AS schema_kind, schema_vertex.uuid AS schema_uuid, field_name, is_attribute
102
+ ORDER BY schema_kind ASC, is_attribute DESC, field_name ASC
103
+ """
104
+ self.return_labels = ["schema_kind", "schema_uuid", "field_name", "is_attribute"]
105
+ self.add_to_query(query)
106
+
107
+ def get_schema_field_details(self) -> list[SchemaFieldDetails]:
108
+ schema_field_details: list[SchemaFieldDetails] = []
109
+ for result in self.results:
110
+ schema_kind = result.get_as_type(label="schema_kind", return_type=str)
111
+ schema_uuid = result.get_as_type(label="schema_uuid", return_type=str)
112
+ field_name = result.get_as_type(label="field_name", return_type=str)
113
+ is_attribute = result.get_as_type(label="is_attribute", return_type=bool)
114
+ schema_field_details.append(
115
+ SchemaFieldDetails(
116
+ schema_kind=schema_kind,
117
+ schema_uuid=schema_uuid,
118
+ field_name=field_name,
119
+ field_type=SchemaFieldType.ATTRIBUTE if is_attribute else SchemaFieldType.RELATIONSHIP,
120
+ )
121
+ )
122
+ return schema_field_details
123
+
124
+
125
+ class FixDuplicateSchemaFields(DuplicateSchemaFields):
126
+ """
127
+ Fix the duplicate schema fields by hard deleting the earlier duplicate(s)
128
+ """
129
+
130
+ name = "fix_duplicate_schema_fields"
131
+ type = QueryType.WRITE
132
+ insert_return = False
133
+
134
+ async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
135
+ await super().query_init(db=db, **kwargs)
136
+ query = """
137
+ WITH default_branch, tail(fields_reverse_chron) AS fields_to_delete
138
+ UNWIND fields_to_delete AS field_to_delete
139
+ CALL (field_to_delete, default_branch) {
140
+ MATCH (field_to_delete)-[r:IS_PART_OF {branch: default_branch}]-()
141
+ DELETE r
142
+ WITH field_to_delete
143
+ MATCH (field_to_delete)-[:IS_RELATED {branch: default_branch}]-(rel:Relationship)
144
+ WITH DISTINCT field_to_delete, rel
145
+ MATCH (rel)-[r {branch: default_branch}]-()
146
+ DELETE r
147
+ WITH field_to_delete, rel
148
+ OPTIONAL MATCH (rel)
149
+ WHERE NOT exists((rel)--())
150
+ DELETE rel
151
+ WITH DISTINCT field_to_delete
152
+ MATCH (field_to_delete)-[:HAS_ATTRIBUTE {branch: default_branch}]->(attr:Attribute)
153
+ MATCH (attr)-[r {branch: default_branch}]-()
154
+ DELETE r
155
+ WITH field_to_delete, attr
156
+ OPTIONAL MATCH (attr)
157
+ WHERE NOT exists((attr)--())
158
+ DELETE attr
159
+ WITH DISTINCT field_to_delete
160
+ OPTIONAL MATCH (field_to_delete)
161
+ WHERE NOT exists((field_to_delete)--())
162
+ DELETE field_to_delete
163
+ }
164
+ """
165
+ self.add_to_query(query)
166
+
167
+
168
+ def display_duplicate_schema_fields(duplicate_schema_fields: list[SchemaFieldDetails]) -> None:
169
+ console = Console()
170
+
171
+ table = Table(title="Duplicate Schema Fields on Default Branch")
172
+
173
+ table.add_column("Schema Kind")
174
+ table.add_column("Schema UUID")
175
+ table.add_column("Field Name")
176
+ table.add_column("Field Type")
177
+
178
+ for duplicate_schema_field in duplicate_schema_fields:
179
+ table.add_row(
180
+ duplicate_schema_field.schema_kind,
181
+ duplicate_schema_field.schema_uuid,
182
+ duplicate_schema_field.field_name,
183
+ duplicate_schema_field.field_type.value,
184
+ )
185
+
186
+ console.print(table)
187
+
188
+
189
+ async def clean_duplicate_schema_fields(db: InfrahubDatabase, fix: bool = False) -> bool:
190
+ """
191
+ Identify any attributes or relationships that are duplicated in a schema on the default branch
192
+ If fix is True, runs cypher queries to hard delete the earlier duplicate
193
+ """
194
+
195
+ duplicate_schema_fields_query = await GetDuplicateSchemaFields.init(db=db)
196
+ await duplicate_schema_fields_query.execute(db=db)
197
+ duplicate_schema_fields = duplicate_schema_fields_query.get_schema_field_details()
198
+
199
+ if not duplicate_schema_fields:
200
+ rprint(f"{SUCCESS_BADGE} No duplicate schema fields found")
201
+ return True
202
+
203
+ display_duplicate_schema_fields(duplicate_schema_fields)
204
+
205
+ if not fix:
206
+ rprint(f"{FAILED_BADGE} Use the --fix flag to fix the duplicate schema fields")
207
+ return False
208
+
209
+ fix_duplicate_schema_fields_query = await FixDuplicateSchemaFields.init(db=db)
210
+ await fix_duplicate_schema_fields_query.execute(db=db)
211
+ rprint(f"{SUCCESS_BADGE} Duplicate schema fields deleted from the default branch")
212
+ return True
infrahub/config.py CHANGED
@@ -8,7 +8,7 @@ from enum import Enum
8
8
  from pathlib import Path
9
9
  from typing import TYPE_CHECKING, Any
10
10
 
11
- import toml
11
+ import tomllib
12
12
  from infrahub_sdk.utils import generate_uuid
13
13
  from pydantic import (
14
14
  AliasChoices,
@@ -371,6 +371,11 @@ class CacheSettings(BaseSettings):
371
371
  tls_enabled: bool = Field(default=False, description="Indicates if TLS is enabled for the connection")
372
372
  tls_insecure: bool = Field(default=False, description="Indicates if TLS certificates are verified")
373
373
  tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
374
+ clean_up_deadlocks_interval_mins: int = Field(
375
+ default=15,
376
+ ge=1,
377
+ description="Age threshold in minutes: locks older than this and owned by inactive workers are deleted by the cleanup task.",
378
+ )
374
379
 
375
380
  @property
376
381
  def service_port(self) -> int:
@@ -975,7 +980,7 @@ def load(config_file_name: Path | str = "infrahub.toml", config_data: dict[str,
975
980
 
976
981
  if config_file.exists():
977
982
  config_string = config_file.read_text(encoding="utf-8")
978
- config_tmp = toml.loads(config_string)
983
+ config_tmp = tomllib.loads(config_string)
979
984
 
980
985
  return Settings(**config_tmp)
981
986