infrahub-server 1.4.13__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (222)
  1. infrahub/actions/tasks.py +208 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/diff/diff.py +1 -1
  4. infrahub/api/internal.py +2 -0
  5. infrahub/api/query.py +2 -0
  6. infrahub/api/schema.py +27 -3
  7. infrahub/auth.py +5 -5
  8. infrahub/cli/__init__.py +2 -0
  9. infrahub/cli/db.py +160 -157
  10. infrahub/cli/dev.py +118 -0
  11. infrahub/cli/upgrade.py +56 -9
  12. infrahub/computed_attribute/tasks.py +19 -7
  13. infrahub/config.py +7 -2
  14. infrahub/core/attribute.py +35 -24
  15. infrahub/core/branch/enums.py +1 -1
  16. infrahub/core/branch/models.py +9 -5
  17. infrahub/core/branch/needs_rebase_status.py +11 -0
  18. infrahub/core/branch/tasks.py +72 -10
  19. infrahub/core/changelog/models.py +2 -10
  20. infrahub/core/constants/__init__.py +4 -0
  21. infrahub/core/constants/infrahubkind.py +1 -0
  22. infrahub/core/convert_object_type/object_conversion.py +201 -0
  23. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  24. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  25. infrahub/core/diff/model/path.py +4 -0
  26. infrahub/core/diff/payload_builder.py +1 -1
  27. infrahub/core/diff/query/artifact.py +1 -0
  28. infrahub/core/diff/query/field_summary.py +1 -0
  29. infrahub/core/graph/__init__.py +1 -1
  30. infrahub/core/initialization.py +7 -4
  31. infrahub/core/manager.py +3 -81
  32. infrahub/core/migrations/__init__.py +3 -0
  33. infrahub/core/migrations/exceptions.py +4 -0
  34. infrahub/core/migrations/graph/__init__.py +11 -10
  35. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  36. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  37. infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
  38. infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
  39. infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
  40. infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
  41. infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
  42. infrahub/core/migrations/query/__init__.py +7 -8
  43. infrahub/core/migrations/query/attribute_add.py +8 -6
  44. infrahub/core/migrations/query/attribute_remove.py +134 -0
  45. infrahub/core/migrations/runner.py +54 -0
  46. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  47. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  48. infrahub/core/migrations/schema/node_attribute_add.py +26 -5
  49. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  50. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  51. infrahub/core/migrations/schema/node_remove.py +2 -1
  52. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  53. infrahub/core/migrations/shared.py +66 -19
  54. infrahub/core/models.py +2 -2
  55. infrahub/core/node/__init__.py +207 -54
  56. infrahub/core/node/create.py +53 -49
  57. infrahub/core/node/lock_utils.py +124 -0
  58. infrahub/core/node/node_property_attribute.py +230 -0
  59. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  60. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  61. infrahub/core/node/resource_manager/number_pool.py +2 -1
  62. infrahub/core/node/standard.py +1 -1
  63. infrahub/core/property.py +11 -0
  64. infrahub/core/protocols.py +8 -1
  65. infrahub/core/query/attribute.py +82 -15
  66. infrahub/core/query/ipam.py +16 -4
  67. infrahub/core/query/node.py +66 -188
  68. infrahub/core/query/relationship.py +44 -26
  69. infrahub/core/query/subquery.py +0 -8
  70. infrahub/core/relationship/model.py +69 -24
  71. infrahub/core/schema/__init__.py +56 -0
  72. infrahub/core/schema/attribute_schema.py +4 -2
  73. infrahub/core/schema/basenode_schema.py +42 -2
  74. infrahub/core/schema/definitions/core/__init__.py +2 -0
  75. infrahub/core/schema/definitions/core/check.py +1 -1
  76. infrahub/core/schema/definitions/core/generator.py +2 -0
  77. infrahub/core/schema/definitions/core/group.py +16 -2
  78. infrahub/core/schema/definitions/core/repository.py +7 -0
  79. infrahub/core/schema/definitions/core/transform.py +1 -1
  80. infrahub/core/schema/definitions/internal.py +12 -3
  81. infrahub/core/schema/generated/attribute_schema.py +2 -2
  82. infrahub/core/schema/generated/base_node_schema.py +6 -1
  83. infrahub/core/schema/manager.py +3 -0
  84. infrahub/core/schema/node_schema.py +1 -0
  85. infrahub/core/schema/relationship_schema.py +0 -1
  86. infrahub/core/schema/schema_branch.py +295 -10
  87. infrahub/core/schema/schema_branch_display.py +135 -0
  88. infrahub/core/schema/schema_branch_hfid.py +120 -0
  89. infrahub/core/validators/aggregated_checker.py +1 -1
  90. infrahub/database/graph.py +21 -0
  91. infrahub/display_labels/__init__.py +0 -0
  92. infrahub/display_labels/gather.py +48 -0
  93. infrahub/display_labels/models.py +240 -0
  94. infrahub/display_labels/tasks.py +192 -0
  95. infrahub/display_labels/triggers.py +22 -0
  96. infrahub/events/branch_action.py +27 -1
  97. infrahub/events/group_action.py +1 -1
  98. infrahub/events/node_action.py +1 -1
  99. infrahub/generators/constants.py +7 -0
  100. infrahub/generators/models.py +38 -12
  101. infrahub/generators/tasks.py +34 -16
  102. infrahub/git/base.py +38 -1
  103. infrahub/git/integrator.py +22 -14
  104. infrahub/graphql/api/dependencies.py +2 -4
  105. infrahub/graphql/api/endpoints.py +16 -6
  106. infrahub/graphql/app.py +2 -4
  107. infrahub/graphql/initialization.py +2 -3
  108. infrahub/graphql/manager.py +213 -137
  109. infrahub/graphql/middleware.py +12 -0
  110. infrahub/graphql/mutations/branch.py +16 -0
  111. infrahub/graphql/mutations/computed_attribute.py +110 -3
  112. infrahub/graphql/mutations/convert_object_type.py +44 -13
  113. infrahub/graphql/mutations/display_label.py +118 -0
  114. infrahub/graphql/mutations/generator.py +25 -7
  115. infrahub/graphql/mutations/hfid.py +125 -0
  116. infrahub/graphql/mutations/ipam.py +73 -41
  117. infrahub/graphql/mutations/main.py +61 -178
  118. infrahub/graphql/mutations/profile.py +195 -0
  119. infrahub/graphql/mutations/proposed_change.py +8 -1
  120. infrahub/graphql/mutations/relationship.py +2 -2
  121. infrahub/graphql/mutations/repository.py +22 -83
  122. infrahub/graphql/mutations/resource_manager.py +2 -2
  123. infrahub/graphql/mutations/webhook.py +1 -1
  124. infrahub/graphql/queries/resource_manager.py +1 -1
  125. infrahub/graphql/registry.py +173 -0
  126. infrahub/graphql/resolvers/resolver.py +2 -0
  127. infrahub/graphql/schema.py +8 -1
  128. infrahub/graphql/schema_sort.py +170 -0
  129. infrahub/graphql/types/branch.py +4 -1
  130. infrahub/graphql/types/enums.py +3 -0
  131. infrahub/groups/tasks.py +1 -1
  132. infrahub/hfid/__init__.py +0 -0
  133. infrahub/hfid/gather.py +48 -0
  134. infrahub/hfid/models.py +240 -0
  135. infrahub/hfid/tasks.py +191 -0
  136. infrahub/hfid/triggers.py +22 -0
  137. infrahub/lock.py +119 -42
  138. infrahub/locks/__init__.py +0 -0
  139. infrahub/locks/tasks.py +37 -0
  140. infrahub/patch/plan_writer.py +2 -2
  141. infrahub/permissions/constants.py +2 -0
  142. infrahub/profiles/__init__.py +0 -0
  143. infrahub/profiles/node_applier.py +101 -0
  144. infrahub/profiles/queries/__init__.py +0 -0
  145. infrahub/profiles/queries/get_profile_data.py +98 -0
  146. infrahub/profiles/tasks.py +63 -0
  147. infrahub/proposed_change/tasks.py +24 -5
  148. infrahub/repositories/__init__.py +0 -0
  149. infrahub/repositories/create_repository.py +113 -0
  150. infrahub/server.py +9 -1
  151. infrahub/services/__init__.py +8 -5
  152. infrahub/services/adapters/workflow/worker.py +5 -2
  153. infrahub/task_manager/event.py +5 -0
  154. infrahub/task_manager/models.py +7 -0
  155. infrahub/tasks/registry.py +6 -4
  156. infrahub/trigger/catalogue.py +4 -0
  157. infrahub/trigger/models.py +2 -0
  158. infrahub/trigger/setup.py +13 -4
  159. infrahub/trigger/tasks.py +6 -0
  160. infrahub/webhook/models.py +1 -1
  161. infrahub/workers/dependencies.py +3 -1
  162. infrahub/workers/infrahub_async.py +5 -1
  163. infrahub/workflows/catalogue.py +118 -3
  164. infrahub/workflows/initialization.py +21 -0
  165. infrahub/workflows/models.py +17 -2
  166. infrahub_sdk/branch.py +17 -8
  167. infrahub_sdk/checks.py +1 -1
  168. infrahub_sdk/client.py +376 -95
  169. infrahub_sdk/config.py +29 -2
  170. infrahub_sdk/convert_object_type.py +61 -0
  171. infrahub_sdk/ctl/branch.py +3 -0
  172. infrahub_sdk/ctl/check.py +2 -3
  173. infrahub_sdk/ctl/cli_commands.py +20 -12
  174. infrahub_sdk/ctl/config.py +8 -2
  175. infrahub_sdk/ctl/generator.py +6 -3
  176. infrahub_sdk/ctl/graphql.py +184 -0
  177. infrahub_sdk/ctl/repository.py +39 -1
  178. infrahub_sdk/ctl/schema.py +40 -10
  179. infrahub_sdk/ctl/task.py +110 -0
  180. infrahub_sdk/ctl/utils.py +4 -0
  181. infrahub_sdk/ctl/validate.py +5 -3
  182. infrahub_sdk/diff.py +4 -5
  183. infrahub_sdk/exceptions.py +2 -0
  184. infrahub_sdk/generator.py +7 -1
  185. infrahub_sdk/graphql/__init__.py +12 -0
  186. infrahub_sdk/graphql/constants.py +1 -0
  187. infrahub_sdk/graphql/plugin.py +85 -0
  188. infrahub_sdk/graphql/query.py +77 -0
  189. infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
  190. infrahub_sdk/graphql/utils.py +40 -0
  191. infrahub_sdk/node/attribute.py +2 -0
  192. infrahub_sdk/node/node.py +28 -20
  193. infrahub_sdk/node/relationship.py +1 -3
  194. infrahub_sdk/playback.py +1 -2
  195. infrahub_sdk/protocols.py +54 -6
  196. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  197. infrahub_sdk/pytest_plugin/utils.py +40 -0
  198. infrahub_sdk/repository.py +1 -2
  199. infrahub_sdk/schema/__init__.py +70 -4
  200. infrahub_sdk/schema/main.py +1 -0
  201. infrahub_sdk/schema/repository.py +8 -0
  202. infrahub_sdk/spec/models.py +7 -0
  203. infrahub_sdk/spec/object.py +54 -6
  204. infrahub_sdk/spec/processors/__init__.py +0 -0
  205. infrahub_sdk/spec/processors/data_processor.py +10 -0
  206. infrahub_sdk/spec/processors/factory.py +34 -0
  207. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  208. infrahub_sdk/spec/range_expansion.py +118 -0
  209. infrahub_sdk/task/models.py +6 -4
  210. infrahub_sdk/timestamp.py +18 -6
  211. infrahub_sdk/transforms.py +1 -1
  212. {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
  213. {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +221 -165
  214. infrahub_testcontainers/container.py +114 -2
  215. infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
  216. infrahub_testcontainers/docker-compose.test.yml +5 -0
  217. infrahub_testcontainers/models.py +2 -2
  218. infrahub_testcontainers/performance_test.py +4 -4
  219. infrahub/core/convert_object_type/conversion.py +0 -134
  220. {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
  221. {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
  222. {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
@@ -8,7 +8,7 @@ from pathlib import Path
 from typing import TYPE_CHECKING
 
 import pytest
-from infrahub_sdk.exceptions import ModuleImportError
+from infrahub_sdk.exceptions import ModuleImportError, NodeNotFoundError, URLNotFoundError
 from infrahub_sdk.node import InfrahubNode
 from infrahub_sdk.protocols import (
     CoreArtifactDefinition,
@@ -311,6 +311,7 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
         populate_store=True,
         branch=model.source_branch,
     )
+
     generator_definitions = [
         ProposedChangeGeneratorDefinition(
             definition_id=generator.id,
@@ -323,8 +324,11 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
             parameters=generator.parameters.value,
             group_id=generator.targets.peer.id,
             convert_query_response=generator.convert_query_response.value,
+            execute_in_proposed_change=generator.execute_in_proposed_change.value,
+            execute_after_merge=generator.execute_after_merge.value,
         )
         for generator in generators
+        if generator.execute_in_proposed_change.value
     ]
 
     diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id)
@@ -528,7 +532,11 @@ async def run_proposed_change_user_tests(model: RequestProposedChangeUserTests)
     log = get_run_logger()
     client = get_client()
 
-    proposed_change = await client.get(kind=InfrahubKind.PROPOSEDCHANGE, id=model.proposed_change)
+    try:
+        proposed_change = await client.get(kind=CoreProposedChange, id=model.proposed_change)
+    except NodeNotFoundError:
+        log.warning(f"Proposed change ({model.proposed_change}) not found, skipping user tests execution")
+        return
 
     def _execute(
         directory: Path, repository: ProposedChangeRepository, proposed_change: InfrahubNode
@@ -792,6 +800,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
         query=model.generator_definition.query_name,
         targets=model.generator_definition.group_id,
         convert_query_response=model.generator_definition.convert_query_response,
+        execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+        execute_after_merge=model.generator_definition.execute_after_merge,
     )
 
     commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -818,6 +828,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
         params=model.variables,
         generator_instance=generator_instance.id,
         convert_query_response=generator_definition.convert_query_response,
+        execute_after_merge=generator_definition.execute_after_merge,
+        execute_in_proposed_change=generator_definition.execute_in_proposed_change,
         infrahub_node=InfrahubNode,
     )
     generator._init_client.request_context = context.to_request_context()
@@ -961,7 +973,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
     requested_instances = 0
     impacted_instances = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE)
 
-    check_generator_run_models = []
+    check_generator_run_models: list[RunGeneratorAsCheckModel] = []
    for relationship in group.members.peers:
         member = relationship.peer
         generator_instance = instance_by_member.get(member.id)
@@ -997,6 +1009,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
             context=context,
         )
         for check_generator_run_model in check_generator_run_models
+        if check_generator_run_model.generator_definition.execute_in_proposed_change
     ]
 
     await run_checks_and_update_validator(
@@ -1523,8 +1536,14 @@ async def _get_proposed_change_repositories(
     destination_all = await client.execute_graphql(
         query=DESTINATION_ALLREPOSITORIES, branch_name=model.destination_branch
     )
-    source_managed = await client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
-    source_readonly = await client.execute_graphql(query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch)
+    try:
+        source_managed = await client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
+        source_readonly = await client.execute_graphql(
+            query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch
+        )
+    except URLNotFoundError:
+        # If the URL is not found it means that the source branch has been deleted after the proposed change was created
+        return []
 
     destination_all = destination_all[InfrahubKind.GENERICREPOSITORY]["edges"]
     source_all = (
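
The two hunks above, together with the NodeNotFoundError handling in run_proposed_change_user_tests, make the proposed-change tasks tolerant of nodes and branches that disappear mid-pipeline, using the two SDK exceptions newly imported at the top of the file. A minimal standalone sketch of the same pattern, with placeholder kind, id, query, and branch values rather than anything taken from this release:

# Sketch of the error-handling pattern above; kind, node_id, query, and branch are placeholders.
from infrahub_sdk import InfrahubClient
from infrahub_sdk.exceptions import NodeNotFoundError, URLNotFoundError


async def load_optional_node(client: InfrahubClient, kind: str, node_id: str):
    try:
        return await client.get(kind=kind, id=node_id)
    except NodeNotFoundError:
        # The node was deleted after the task was scheduled: skip instead of failing.
        return None


async def query_branch_if_present(client: InfrahubClient, query: str, branch: str):
    try:
        return await client.execute_graphql(query=query, branch_name=branch)
    except URLNotFoundError:
        # The branch no longer exists, e.g. it was deleted after the proposed change was opened.
        return None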
infrahub/repositories/__init__.py (file without changes)
infrahub/repositories/create_repository.py ADDED
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+from infrahub.core.constants import RepositoryInternalStatus
+from infrahub.core.constants.infrahubkind import READONLYREPOSITORY, REPOSITORY
+from infrahub.core.protocols import CoreGenericRepository, CoreReadOnlyRepository, CoreRepository
+from infrahub.exceptions import ValidationError
+from infrahub.git.models import GitRepositoryAdd, GitRepositoryAddReadOnly
+from infrahub.log import get_logger
+from infrahub.message_bus import messages
+from infrahub.message_bus.messages.git_repository_connectivity import GitRepositoryConnectivityResponse
+from infrahub.workflows.catalogue import GIT_REPOSITORY_ADD, GIT_REPOSITORY_ADD_READ_ONLY
+
+if TYPE_CHECKING:
+    from infrahub.auth import AccountSession
+    from infrahub.context import InfrahubContext
+    from infrahub.core.branch import Branch
+    from infrahub.database import InfrahubDatabase
+    from infrahub.services import InfrahubServices
+
+log = get_logger()
+
+
+class RepositoryFinalizer:
+    def __init__(
+        self,
+        account_session: AccountSession,
+        services: InfrahubServices,
+        context: InfrahubContext,
+    ) -> None:
+        self.account_session = account_session
+        self.services = services
+        self.context = context
+
+    async def post_create(
+        self,
+        obj: CoreGenericRepository,
+        branch: Branch,
+        db: InfrahubDatabase,
+        delete_on_connectivity_failure: bool = True,
+    ) -> None:
+        """
+        Method meant to be called after a repository has been created in the database.
+        It mainly checks the connectivity to the remote repository and submit the workflow to create the repository in the local filesystem.
+        """
+
+        # If the connectivity is not good, we remove the repository to allow the user to add a new one
+        if delete_on_connectivity_failure:
+            message = messages.GitRepositoryConnectivity(
+                repository_name=obj.name.value,
+                repository_location=obj.location.value,
+            )
+            response = await self.services.message_bus.rpc(
+                message=message, response_class=GitRepositoryConnectivityResponse
+            )
+
+            if response.data.success is False:
+                await obj.delete(db=db)
+                raise ValidationError(response.data.message)
+
+        # If we are in the default branch, we set the sync status to Active
+        # If we are in another branch, we set the sync status to Staging
+        if branch.is_default:
+            obj.internal_status.value = RepositoryInternalStatus.ACTIVE.value
+        else:
+            obj.internal_status.value = RepositoryInternalStatus.STAGING.value
+        await obj.save(db=db)
+
+        # Create the new repository in the filesystem.
+        log.info("create_repository", name=obj.name.value)
+        authenticated_user = None
+        if self.account_session and self.account_session.authenticated:
+            authenticated_user = self.account_session.account_id
+
+        if obj.get_kind() == READONLYREPOSITORY:
+            obj = cast(CoreReadOnlyRepository, obj)
+            model = GitRepositoryAddReadOnly(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                ref=obj.ref.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD_READ_ONLY,
+                context=self.context,
+                parameters={"model": model},
+            )
+
+        elif obj.get_kind() == REPOSITORY:
+            obj = cast(CoreRepository, obj)
+            git_repo_add_model = GitRepositoryAdd(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                default_branch_name=obj.default_branch.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD,
+                context=self.context,
+                parameters={"model": git_repo_add_model},
+            )
+        else:
+            raise ValueError(f"Unknown repository kind: {obj.get_kind()}")
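
The new RepositoryFinalizer gathers the post-creation steps for a repository node (remote connectivity check, sync-status assignment, and submission of the Git add workflow), which the file list suggests previously lived inline in infrahub/graphql/mutations/repository.py (+22 -83). A hypothetical call site, assuming the caller already has the account session, services, context, branch, database session, and freshly created repository node in scope:

# Hypothetical usage sketch; the surrounding mutation plumbing is assumed, not shown in this diff.
from infrahub.repositories.create_repository import RepositoryFinalizer


async def finalize_new_repository(account_session, services, context, repository_node, branch, db) -> None:
    finalizer = RepositoryFinalizer(account_session=account_session, services=services, context=context)
    # Checks remote connectivity (deleting the node on failure), sets the internal
    # sync status from the branch, and submits the matching Git add workflow.
    await finalizer.post_create(obj=repository_node, branch=branch, db=db)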
infrahub/server.py CHANGED
@@ -24,6 +24,7 @@ from infrahub.api.exception_handlers import generic_api_exception_handler
 from infrahub.components import ComponentType
 from infrahub.constants.environment import INSTALLATION_TYPE
 from infrahub.core.initialization import initialization
+from infrahub.database.graph import validate_graph_version
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.exceptions import Error, ValidationError
 from infrahub.graphql.api.endpoints import router as graphql_router
@@ -83,10 +84,17 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True
     initialize_lock(service=service)
     # We must initialize DB after initialize lock and initialize lock depends on cache initialization
     async with application.state.db.start_session() as db:
-        await initialization(db=db, add_database_indexes=True)
+        is_initial_setup = await initialization(db=db, add_database_indexes=True)
+
+    async with database.start_session() as dbs:
+        await validate_graph_version(db=dbs)
+
+    # Initialize the workflow after the registry has been setup
+    await service.initialize_workflow(is_initial_setup=is_initial_setup)
 
     application.state.service = service
     application.state.response_delay = config.SETTINGS.miscellaneous.response_delay
+
     if enable_scheduler:
         await service.scheduler.start_schedule()
 
@@ -110,14 +110,17 @@ class InfrahubServices:
         # This circular dependency could be removed if InfrahubScheduler only depends on what it needs.
         scheduler.service = service
 
-        if workflow is not None and isinstance(workflow, WorkflowWorkerExecution):
-            assert service.component is not None
+        return service
+
+    async def initialize_workflow(self, is_initial_setup: bool = False) -> None:
+        if self.workflow is not None and isinstance(self.workflow, WorkflowWorkerExecution):
+            assert self.component is not None
             # Ideally `WorkflowWorkerExecution.initialize` would be directly part of WorkflowWorkerExecution
             # constructor but this requires some redesign as it depends on InfrahubComponent which is instantiated
             # after workflow instantiation.
-            await workflow.initialize(component_is_primary_server=await service.component.is_primary_gunicorn_worker())
-
-        return service
+            await self.component.refresh_heartbeat()
+            is_primary = await self.component.is_primary_gunicorn_worker()
+            await self.workflow.initialize(component_is_primary_server=is_primary, is_initial_setup=is_initial_setup)
 
     @property
     def component(self) -> InfrahubComponent:
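
Taken with the server.py hunk above, this splits service construction from workflow initialization: app_initialization() now keeps the is_initial_setup flag returned by initialization() and calls the new InfrahubServices.initialize_workflow() once the registry is ready. A rough sketch of the resulting startup order, assembled from the hunks above rather than quoted verbatim:

# Startup-order sketch; `application`, `database`, and `service` are the names used in app_initialization().
async with application.state.db.start_session() as db:
    # initialization() now reports whether this is the first boot of this instance.
    is_initial_setup = await initialization(db=db, add_database_indexes=True)

async with database.start_session() as dbs:
    # Refuse to run against an unexpected graph schema version.
    await validate_graph_version(db=dbs)

# Prefect/task-manager setup is deferred until the registry is ready; on the primary
# worker it also seeds task-manager identifiers when is_initial_setup is True.
await service.initialize_workflow(is_initial_setup=is_initial_setup)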
@@ -8,7 +8,7 @@ from prefect.deployments import run_deployment
 
 from infrahub.services.adapters.http.httpx import HttpxAdapter
 from infrahub.workers.utils import inject_context_parameter
-from infrahub.workflows.initialization import setup_task_manager
+from infrahub.workflows.initialization import setup_task_manager, setup_task_manager_identifiers
 from infrahub.workflows.models import WorkflowInfo
 
 from . import InfrahubWorkflow, Return
@@ -27,10 +27,13 @@ class WorkflowWorkerExecution(InfrahubWorkflow):
     _http_adapter = HttpxAdapter()
 
     @staticmethod
-    async def initialize(component_is_primary_server: bool) -> None:
+    async def initialize(component_is_primary_server: bool, is_initial_setup: bool = False) -> None:
         if component_is_primary_server:
             await setup_task_manager()
 
+        if is_initial_setup:
+            await setup_task_manager_identifiers()
+
     @overload
     async def execute_workflow(
         self,
@@ -160,6 +160,9 @@ class PrefectEventData(PrefectEventModel):
     def _return_branch_rebased(self) -> dict[str, Any]:
         return {"rebased_branch": self._get_branch_name_from_resource()}
 
+    def _return_branch_migrated(self) -> dict[str, Any]:
+        return {"migrated_branch": self._get_branch_name_from_resource()}
+
     def _return_group_event(self) -> dict[str, Any]:
         members = []
         ancestors = []
@@ -228,6 +231,8 @@ class PrefectEventData(PrefectEventModel):
             case "infrahub.branch.deleted":
                 event_specifics = self._return_branch_deleted()
             case "infrahub.branch.merged":
                 event_specifics = self._return_branch_merged()
+            case "infrahub.branch.migrated":
+                event_specifics = self._return_branch_migrated()
             case "infrahub.branch.rebased":
                 event_specifics = self._return_branch_rebased()
             case "infrahub.group.member_added" | "infrahub.group.member_removed":
@@ -141,6 +141,13 @@ class InfrahubEventFilter(EventFilter):
            if branches:
                self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))
 
+        if branch_migrated := event_type_filter.get("branch_migrated"):
+            branches = branch_migrated.get("branches") or []
+            if "infrahub.branch.created" not in event_type:
+                event_type.append("infrahub.branch.migrated")
+            if branches:
+                self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))
+
         if branch_rebased := event_type_filter.get("branch_rebased"):
             branches = branch_rebased.get("branches") or []
             if "infrahub.branch.created" not in event_type:
@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING
 from infrahub import lock
 from infrahub.core import registry
 from infrahub.core.constants import GLOBAL_BRANCH_NAME
+from infrahub.graphql.registry import registry as graphql_registry
 from infrahub.log import get_logger
 from infrahub.worker import WORKER_IDENTITY
 
@@ -20,9 +21,8 @@ def update_graphql_schema(branch: Branch, schema_branch: SchemaBranch) -> None:
     """
     Update the GraphQL schema for the given branch.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager
 
-    gqlm = GraphQLSchemaManager.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
+    gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
     gqlm.get_graphql_schema(
         include_query=True,
         include_mutation=True,
@@ -67,6 +67,9 @@ async def update_branch_registry(db: InfrahubDatabase, branch: Branch) -> None:
                 worker=WORKER_IDENTITY,
             )
             registry.branch[branch.name] = branch
+        elif existing_branch.status != branch.status:
+            log.info(f"Updating registry branch cache for {branch.name=}")
+            registry.branch[branch.name] = branch
         return
 
     log.info(
@@ -89,7 +92,6 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
     If a branch is already present with a different value for the hash
     We pull the new schema from the database and we update the registry.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager
 
     async with lock.registry.local_schema_lock():
         active_branches = await registry.branch_object.get_list(db=db)
@@ -106,7 +108,7 @@
 
         purged_branches = await registry.purge_inactive_branches(db=db, active_branches=active_branches)
         purged_branches.update(
-            GraphQLSchemaManager.purge_inactive(active_branches=[branch.name for branch in active_branches])
+            graphql_registry.purge_inactive(active_branches=[branch.name for branch in active_branches])
         )
         for branch_name in sorted(purged_branches):
             log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)
@@ -4,6 +4,8 @@ from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
 )
+from infrahub.display_labels.triggers import TRIGGER_DISPLAY_LABELS_ALL_SCHEMA
+from infrahub.hfid.triggers import TRIGGER_HFID_ALL_SCHEMA
 from infrahub.schema.triggers import TRIGGER_SCHEMA_UPDATED
 from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
@@ -13,6 +15,8 @@ builtin_triggers: list[TriggerDefinition] = [
     TRIGGER_BRANCH_MERGED,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
+    TRIGGER_DISPLAY_LABELS_ALL_SCHEMA,
+    TRIGGER_HFID_ALL_SCHEMA,
     TRIGGER_SCHEMA_UPDATED,
     TRIGGER_WEBHOOK_DELETE,
     TRIGGER_WEBHOOK_SETUP_UPDATE,
@@ -37,6 +37,8 @@ class TriggerType(str, Enum):
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
     COMPUTED_ATTR_PYTHON = "computed_attr_python"
     COMPUTED_ATTR_PYTHON_QUERY = "computed_attr_python_query"
+    DISPLAY_LABEL_JINJA2 = "display_label_jinja2"
+    HUMAN_FRIENDLY_ID = "human_friendly_id"
     # OBJECT = "object"
 
 
infrahub/trigger/setup.py CHANGED
@@ -6,6 +6,7 @@ from prefect.cache_policies import NONE
 from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.filters import DeploymentFilter, DeploymentFilterName
 from prefect.events.schemas.automations import Automation
+from prefect.exceptions import PrefectHTTPStatusError
 
 from infrahub import lock
 from infrahub.database import InfrahubDatabase
@@ -51,7 +52,7 @@ async def setup_triggers_specific(
     )  # type: ignore[misc]
 
 
-@task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)  # type: ignore[arg-type]
+@task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)
 async def setup_triggers(
     client: PrefectClient,
     triggers: list[TriggerDefinition],
@@ -83,7 +84,9 @@ async def setup_triggers(
     existing_automations: dict[str, Automation] = {}
     if trigger_type:
         existing_automations = {
-            item.name: item for item in await client.read_automations() if item.name.startswith(trigger_type.value)
+            item.name: item
+            for item in await client.read_automations()
+            if item.name.startswith(f"{trigger_type.value}::")
         }
     else:
         existing_automations = {item.name: item for item in await client.read_automations()}
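
The stricter match matters because trigger type values can be prefixes of one another, for example computed_attr_python and computed_attr_python_query in the TriggerType enum shown earlier, and the automation names appear to follow a "<trigger type>::<name>" convention. A quick illustration with invented automation names:

# Invented automation names using real TriggerType values; shows why the "::" suffix is needed.
names = [
    "computed_attr_python::tag-sync",
    "computed_attr_python_query::tag-sync",
]

# Plain prefix matching bleeds across trigger types:
assert [n for n in names if n.startswith("computed_attr_python")] == names

# Matching on "<type>::" keeps each trigger type's automations separate:
assert [n for n in names if n.startswith("computed_attr_python::")] == ["computed_attr_python::tag-sync"]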
@@ -133,8 +136,14 @@ async def setup_triggers(
             continue
 
         report.deleted.append(existing_automation)
-        await client.delete_automation(automation_id=existing_automation.id)
-        log.info(f"{item_to_delete} Deleted")
+        try:
+            await client.delete_automation(automation_id=existing_automation.id)
+            log.info(f"{item_to_delete} Deleted")
+        except PrefectHTTPStatusError as exc:
+            if exc.response.status_code == 404:
+                log.info(f"{item_to_delete} was already deleted")
+            else:
+                raise
 
     if trigger_type:
         log.info(
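
Deleting an automation that another worker (or a concurrent run) already removed now logs and moves on instead of failing the task. The same idempotent-delete pattern in isolation, assuming a PrefectClient and an automation id are supplied by the caller:

# Sketch of the idempotent-delete pattern above; the client and automation_id are assumed inputs.
from prefect.client.orchestration import PrefectClient
from prefect.exceptions import PrefectHTTPStatusError


async def delete_automation_if_present(client: PrefectClient, automation_id) -> None:
    try:
        await client.delete_automation(automation_id=automation_id)
    except PrefectHTTPStatusError as exc:
        if exc.response.status_code != 404:
            raise  # only "already deleted" is tolerated; other HTTP errors still propagate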
infrahub/trigger/tasks.py CHANGED
@@ -6,6 +6,8 @@ from infrahub.computed_attribute.gather import (
     gather_trigger_computed_attribute_jinja2,
     gather_trigger_computed_attribute_python,
 )
+from infrahub.display_labels.gather import gather_trigger_display_labels_jinja2
+from infrahub.hfid.gather import gather_trigger_hfid
 from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.webhook.gather import gather_trigger_webhook
 from infrahub.workers.dependencies import get_database
@@ -18,6 +20,8 @@ async def trigger_configure_all() -> None:
     database = await get_database()
     async with database.start_session() as db:
         webhook_trigger = await gather_trigger_webhook(db=db)
+        display_label_triggers = await gather_trigger_display_labels_jinja2()
+        human_friendly_id_triggers = await gather_trigger_hfid()
         computed_attribute_j2_triggers = await gather_trigger_computed_attribute_jinja2()
         (
             computed_attribute_python_triggers,
@@ -28,6 +32,8 @@
             computed_attribute_j2_triggers
             + computed_attribute_python_triggers
             + computed_attribute_python_query_triggers
+            + display_label_triggers
+            + human_friendly_id_triggers
             + builtin_triggers
             + webhook_trigger
             + action_rules
@@ -231,7 +231,7 @@ class TransformWebhook(Webhook):
             commit=commit,
             location=f"{self.transform_file}::{self.transform_class}",
             convert_query_response=self.convert_query_response,
-            data={"data": data, **context.model_dump()},
+            data={"data": {"data": data, **context.model_dump()}},
             client=client,
         )  # type: ignore[misc]
 
@@ -35,7 +35,9 @@ def get_component_type() -> ComponentType:
 
 
 def build_client() -> InfrahubClient:
-    client = InfrahubClient(config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True))
+    client_config = Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True)
+    client_config.set_ssl_context(context=get_http().verify_tls())
+    client = InfrahubClient(config=client_config)
     # Populate client schema cache using our internal schema cache
     if registry.schema:
         for branch in registry.schema.get_branches():
@@ -19,6 +19,7 @@ from infrahub import config
 from infrahub.components import ComponentType
 from infrahub.core import registry
 from infrahub.core.initialization import initialization
+from infrahub.database.graph import validate_graph_version
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.git import initialize_repositories_directory
 from infrahub.lock import initialize_lock
@@ -131,6 +132,9 @@ class InfrahubWorkerAsync(BaseWorker):
 
         await self.service.component.refresh_schema_hash()
 
+        async with self.service.database.start_session() as dbs:
+            await validate_graph_version(db=dbs)
+
         initialize_repositories_directory()
         build_component_registry()
         await self.service.scheduler.start_schedule()
@@ -140,7 +144,7 @@
         self,
         flow_run: FlowRun,
         configuration: BaseJobConfiguration,
-        task_status: TaskStatus | None = None,
+        task_status: TaskStatus[int] | None = None,
     ) -> BaseWorkerResult:
         flow_run_logger = self.get_flow_run_logger(flow_run)