infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (183)
  1. infrahub/actions/tasks.py +8 -0
  2. infrahub/api/diff/diff.py +1 -1
  3. infrahub/api/internal.py +2 -0
  4. infrahub/api/oauth2.py +13 -19
  5. infrahub/api/oidc.py +15 -21
  6. infrahub/api/schema.py +24 -3
  7. infrahub/artifacts/models.py +2 -1
  8. infrahub/auth.py +137 -3
  9. infrahub/cli/__init__.py +2 -0
  10. infrahub/cli/db.py +103 -98
  11. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  12. infrahub/cli/dev.py +118 -0
  13. infrahub/cli/tasks.py +46 -0
  14. infrahub/cli/upgrade.py +30 -3
  15. infrahub/computed_attribute/tasks.py +20 -8
  16. infrahub/core/attribute.py +13 -5
  17. infrahub/core/branch/enums.py +1 -1
  18. infrahub/core/branch/models.py +7 -3
  19. infrahub/core/branch/tasks.py +70 -8
  20. infrahub/core/changelog/models.py +4 -12
  21. infrahub/core/constants/__init__.py +3 -0
  22. infrahub/core/constants/infrahubkind.py +1 -0
  23. infrahub/core/diff/model/path.py +4 -0
  24. infrahub/core/diff/payload_builder.py +1 -1
  25. infrahub/core/diff/query/artifact.py +1 -0
  26. infrahub/core/diff/query/field_summary.py +1 -0
  27. infrahub/core/graph/__init__.py +1 -1
  28. infrahub/core/initialization.py +5 -2
  29. infrahub/core/ipam/utilization.py +1 -1
  30. infrahub/core/manager.py +6 -3
  31. infrahub/core/migrations/__init__.py +3 -0
  32. infrahub/core/migrations/exceptions.py +4 -0
  33. infrahub/core/migrations/graph/__init__.py +12 -11
  34. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  35. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  36. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  37. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  38. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  39. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  40. infrahub/core/migrations/query/__init__.py +7 -8
  41. infrahub/core/migrations/query/attribute_add.py +8 -6
  42. infrahub/core/migrations/query/attribute_remove.py +134 -0
  43. infrahub/core/migrations/runner.py +54 -0
  44. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  45. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  46. infrahub/core/migrations/schema/node_attribute_add.py +35 -4
  47. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  48. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  49. infrahub/core/migrations/schema/node_remove.py +2 -1
  50. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  51. infrahub/core/migrations/shared.py +52 -19
  52. infrahub/core/node/__init__.py +158 -51
  53. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  54. infrahub/core/node/create.py +46 -63
  55. infrahub/core/node/lock_utils.py +70 -44
  56. infrahub/core/node/node_property_attribute.py +230 -0
  57. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  58. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  59. infrahub/core/node/resource_manager/number_pool.py +2 -1
  60. infrahub/core/node/standard.py +1 -1
  61. infrahub/core/protocols.py +7 -1
  62. infrahub/core/query/attribute.py +55 -0
  63. infrahub/core/query/ipam.py +1 -0
  64. infrahub/core/query/node.py +23 -4
  65. infrahub/core/query/relationship.py +1 -0
  66. infrahub/core/registry.py +2 -2
  67. infrahub/core/relationship/constraints/count.py +1 -1
  68. infrahub/core/relationship/model.py +1 -1
  69. infrahub/core/schema/__init__.py +56 -0
  70. infrahub/core/schema/attribute_schema.py +4 -0
  71. infrahub/core/schema/basenode_schema.py +42 -2
  72. infrahub/core/schema/definitions/core/__init__.py +2 -0
  73. infrahub/core/schema/definitions/core/generator.py +2 -0
  74. infrahub/core/schema/definitions/core/group.py +16 -2
  75. infrahub/core/schema/definitions/internal.py +16 -3
  76. infrahub/core/schema/generated/attribute_schema.py +2 -2
  77. infrahub/core/schema/generated/base_node_schema.py +6 -1
  78. infrahub/core/schema/manager.py +22 -1
  79. infrahub/core/schema/node_schema.py +5 -2
  80. infrahub/core/schema/schema_branch.py +300 -8
  81. infrahub/core/schema/schema_branch_display.py +123 -0
  82. infrahub/core/schema/schema_branch_hfid.py +114 -0
  83. infrahub/core/validators/aggregated_checker.py +1 -1
  84. infrahub/core/validators/determiner.py +12 -1
  85. infrahub/core/validators/relationship/peer.py +1 -1
  86. infrahub/core/validators/tasks.py +1 -1
  87. infrahub/database/graph.py +21 -0
  88. infrahub/display_labels/__init__.py +0 -0
  89. infrahub/display_labels/gather.py +48 -0
  90. infrahub/display_labels/models.py +240 -0
  91. infrahub/display_labels/tasks.py +192 -0
  92. infrahub/display_labels/triggers.py +22 -0
  93. infrahub/events/branch_action.py +27 -1
  94. infrahub/events/group_action.py +1 -1
  95. infrahub/events/node_action.py +1 -1
  96. infrahub/generators/constants.py +7 -0
  97. infrahub/generators/models.py +7 -0
  98. infrahub/generators/tasks.py +34 -22
  99. infrahub/git/base.py +4 -1
  100. infrahub/git/integrator.py +23 -15
  101. infrahub/git/models.py +2 -1
  102. infrahub/git/repository.py +22 -5
  103. infrahub/git/tasks.py +66 -10
  104. infrahub/git/utils.py +123 -1
  105. infrahub/graphql/analyzer.py +1 -1
  106. infrahub/graphql/api/endpoints.py +14 -4
  107. infrahub/graphql/manager.py +4 -9
  108. infrahub/graphql/mutations/convert_object_type.py +11 -1
  109. infrahub/graphql/mutations/display_label.py +118 -0
  110. infrahub/graphql/mutations/generator.py +25 -7
  111. infrahub/graphql/mutations/hfid.py +125 -0
  112. infrahub/graphql/mutations/ipam.py +54 -35
  113. infrahub/graphql/mutations/main.py +27 -28
  114. infrahub/graphql/mutations/relationship.py +2 -2
  115. infrahub/graphql/mutations/resource_manager.py +2 -2
  116. infrahub/graphql/mutations/schema.py +5 -5
  117. infrahub/graphql/queries/resource_manager.py +1 -1
  118. infrahub/graphql/resolvers/resolver.py +2 -0
  119. infrahub/graphql/schema.py +4 -0
  120. infrahub/graphql/schema_sort.py +170 -0
  121. infrahub/graphql/types/branch.py +4 -1
  122. infrahub/graphql/types/enums.py +3 -0
  123. infrahub/groups/tasks.py +1 -1
  124. infrahub/hfid/__init__.py +0 -0
  125. infrahub/hfid/gather.py +48 -0
  126. infrahub/hfid/models.py +240 -0
  127. infrahub/hfid/tasks.py +191 -0
  128. infrahub/hfid/triggers.py +22 -0
  129. infrahub/lock.py +67 -16
  130. infrahub/message_bus/types.py +2 -1
  131. infrahub/middleware.py +26 -1
  132. infrahub/permissions/constants.py +2 -0
  133. infrahub/proposed_change/tasks.py +35 -17
  134. infrahub/server.py +21 -4
  135. infrahub/services/__init__.py +8 -5
  136. infrahub/services/adapters/http/__init__.py +5 -0
  137. infrahub/services/adapters/workflow/worker.py +14 -3
  138. infrahub/task_manager/event.py +5 -0
  139. infrahub/task_manager/models.py +7 -0
  140. infrahub/task_manager/task.py +73 -0
  141. infrahub/trigger/catalogue.py +4 -0
  142. infrahub/trigger/models.py +2 -0
  143. infrahub/trigger/setup.py +13 -4
  144. infrahub/trigger/tasks.py +6 -0
  145. infrahub/workers/dependencies.py +10 -1
  146. infrahub/workers/infrahub_async.py +10 -2
  147. infrahub/workflows/catalogue.py +80 -0
  148. infrahub/workflows/initialization.py +21 -0
  149. infrahub/workflows/utils.py +2 -1
  150. infrahub_sdk/checks.py +1 -1
  151. infrahub_sdk/client.py +13 -10
  152. infrahub_sdk/config.py +29 -2
  153. infrahub_sdk/ctl/cli_commands.py +2 -0
  154. infrahub_sdk/ctl/generator.py +4 -0
  155. infrahub_sdk/ctl/graphql.py +184 -0
  156. infrahub_sdk/ctl/schema.py +28 -9
  157. infrahub_sdk/generator.py +7 -1
  158. infrahub_sdk/graphql/__init__.py +12 -0
  159. infrahub_sdk/graphql/constants.py +1 -0
  160. infrahub_sdk/graphql/plugin.py +85 -0
  161. infrahub_sdk/graphql/query.py +77 -0
  162. infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
  163. infrahub_sdk/graphql/utils.py +40 -0
  164. infrahub_sdk/protocols.py +14 -0
  165. infrahub_sdk/schema/__init__.py +70 -4
  166. infrahub_sdk/schema/repository.py +8 -0
  167. infrahub_sdk/spec/models.py +7 -0
  168. infrahub_sdk/spec/object.py +53 -44
  169. infrahub_sdk/spec/processors/__init__.py +0 -0
  170. infrahub_sdk/spec/processors/data_processor.py +10 -0
  171. infrahub_sdk/spec/processors/factory.py +34 -0
  172. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  173. infrahub_sdk/spec/range_expansion.py +1 -1
  174. infrahub_sdk/transforms.py +1 -1
  175. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
  176. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
  177. infrahub_testcontainers/container.py +115 -3
  178. infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
  179. infrahub_testcontainers/docker-compose.test.yml +6 -1
  180. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  181. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  182. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  183. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/hfid/tasks.py ADDED
@@ -0,0 +1,191 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import cast
4
+
5
+ from infrahub_sdk.exceptions import URLNotFoundError
6
+ from prefect import flow
7
+ from prefect.logging import get_run_logger
8
+
9
+ from infrahub.context import InfrahubContext # noqa: TC001 needed for prefect flow
10
+ from infrahub.core.registry import registry
11
+ from infrahub.events import BranchDeletedEvent
12
+ from infrahub.trigger.models import TriggerSetupReport, TriggerType
13
+ from infrahub.trigger.setup import setup_triggers_specific
14
+ from infrahub.workers.dependencies import get_client, get_component, get_database, get_workflow
15
+ from infrahub.workflows.catalogue import HFID_PROCESS, TRIGGER_UPDATE_HFID
16
+ from infrahub.workflows.utils import add_tags, wait_for_schema_to_converge
17
+
18
+ from .gather import gather_trigger_hfid
19
+ from .models import HFIDGraphQL, HFIDGraphQLResponse, HFIDTriggerDefinition
20
+
21
+ UPDATE_HFID = """
22
+ mutation UpdateHFID(
23
+ $id: String!,
24
+ $kind: String!,
25
+ $value: [String!]!
26
+ ) {
27
+ InfrahubUpdateHFID(
28
+ data: {id: $id, value: $value, kind: $kind}
29
+ ) {
30
+ ok
31
+ }
32
+ }
33
+ """
34
+
35
+
36
@flow(
    name="hfid-update-value",
    flow_run_name="Update value for hfid on {node_kind}",
)
async def hfid_update_value(
    branch_name: str,
    obj: HFIDGraphQLResponse,
    node_kind: str,
    hfid_definition: list[str],
) -> None:
    """Recompute and persist the human friendly ID (HFID) of a single node.

    Args:
        branch_name: Branch on which the node lives.
        obj: GraphQL response payload holding the node id, the current HFID
            value and the variables used to render the new one.
        node_kind: Kind of the node being updated.
        hfid_definition: Ordered list of HFID components; each component is
            looked up in ``obj.variables``.
    """
    log = get_run_logger()
    client = get_client()

    await add_tags(branches=[branch_name], nodes=[obj.node_id], db_change=True)

    # Build the new HFID from the definition; components missing from the
    # response variables are silently skipped.
    rendered_hfid: list[str] = [
        obj.variables[hfid_component] for hfid_component in hfid_definition if hfid_component in obj.variables
    ]
    if rendered_hfid == obj.hfid_value:
        # Nothing changed, avoid a no-op mutation.
        log.debug(f"Ignoring to update {obj} with existing value on human_friendly_id={obj.hfid_value}")
        return

    try:
        await client.execute_graphql(
            query=UPDATE_HFID,
            variables={"id": obj.node_id, "kind": node_kind, "value": rendered_hfid},
            branch_name=branch_name,
        )
        log.info(f"Updating {node_kind}.human_friendly_id='{rendered_hfid}' ({obj.node_id})")
    except URLNotFoundError:
        # The branch may have been deleted between scheduling and execution.
        log.warning(
            f"Updating {node_kind}.human_friendly_id='{rendered_hfid}' ({obj.node_id}) failed for branch {branch_name} (branch not found)"
        )
71
+
72
+
73
@flow(
    name="hfid-process",
    flow_run_name="Process human friendly ids for {target_kind}",
)
async def process_hfid(
    branch_name: str,
    node_kind: str,
    object_id: str,
    target_kind: str,
    context: InfrahubContext,  # noqa: ARG001
) -> None:
    """Find nodes of ``target_kind`` whose HFID may be stale after a change on ``object_id`` and schedule updates.

    Args:
        branch_name: Branch on which the change happened.
        node_kind: Kind of the node that changed.
        object_id: Id of the changed node, used to filter the candidate query.
        target_kind: Kind of the nodes whose HFID must be recomputed.
        context: Infrahub context, required by the flow signature but unused here.
    """
    log = get_run_logger()
    client = get_client()

    await add_tags(branches=[branch_name])

    # Use the branch-specific schema only when the branch diverged from the default one.
    target_schema = branch_name if branch_name in registry.get_altered_schema_branches() else registry.default_branch
    schema_branch = registry.schema.get_schema_branch(name=target_schema)
    node_schema = schema_branch.get_node(name=target_kind, duplicate=False)

    if node_kind == target_kind:
        # The changed node defines its own HFID.
        hfid_definition = schema_branch.hfids.get_node_definition(kind=node_kind)
    else:
        # The changed node contributes to the HFID of a related node.
        hfid_definition = schema_branch.hfids.get_related_definition(related_kind=node_kind, target_kind=target_kind)

    hfid_graphql = HFIDGraphQL(
        node_schema=node_schema, variables=hfid_definition.hfid, filter_key=hfid_definition.filter_key
    )

    query = hfid_graphql.render_graphql_query(filter_id=object_id)
    response = await client.execute_graphql(query=query, branch_name=branch_name)
    update_candidates = hfid_graphql.parse_response(response=response)

    if not update_candidates:
        log.debug("No nodes found that requires updates")
        return

    # Fan out one update task per candidate and wait for all of them to finish.
    batch = await client.create_batch()
    for node in update_candidates:
        batch.add(
            task=hfid_update_value,
            branch_name=branch_name,
            obj=node,
            node_kind=node_schema.kind,
            hfid_definition=hfid_definition.hfid,
        )

    _ = [response async for _, response in batch.execute()]
123
+
124
+
125
@flow(name="hfid-setup", flow_run_name="Setup human friendly ids in task-manager")
async def hfid_setup(context: InfrahubContext, branch_name: str | None = None, event_name: str | None = None) -> None:
    """Configure HFID automations in the task-manager and trigger updates for affected kinds.

    Args:
        context: Infrahub context forwarded to the triggered workflows.
        branch_name: Branch whose schema changed; when set, wait for the schema
            to converge before setting up triggers.
        event_name: Name of the event that started this flow, used to skip
            HFID updates when the branch was deleted.
    """
    database = await get_database()
    async with database.start_session() as db:
        log = get_run_logger()

        if branch_name:
            await add_tags(branches=[branch_name])
            component = await get_component()
            await wait_for_schema_to_converge(branch_name=branch_name, component=component, db=db, log=log)

        report: TriggerSetupReport = await setup_triggers_specific(
            gatherer=gather_trigger_hfid, trigger_type=TriggerType.HUMAN_FRIENDLY_ID
        )  # type: ignore[misc]

        # Configure all HFIDTriggerDefinitions in Prefect
        hfid_reports = [cast(HFIDTriggerDefinition, entry) for entry in report.updated + report.created]
        direct_target_triggers = [hfid_report for hfid_report in hfid_reports if hfid_report.target_kind]

        for hfid_report in direct_target_triggers:
            # Skip branch-deletion events: there is nothing left to update on a deleted branch.
            if event_name != BranchDeletedEvent.event_name and hfid_report.branch == branch_name:
                await get_workflow().submit_workflow(
                    workflow=TRIGGER_UPDATE_HFID,
                    context=context,
                    parameters={
                        "branch_name": hfid_report.branch,
                        "kind": hfid_report.target_kind,
                    },
                )

        log.info(f"{report.in_use_count} HFID automation configurations completed")
156
+
157
+
158
@flow(
    name="trigger-update-hfid",
    flow_run_name="Trigger updates for HFID for {kind}",
)
async def trigger_update_hfid(
    branch_name: str,
    kind: str,
    context: InfrahubContext,
) -> None:
    """Schedule an HFID recomputation workflow for every node of ``kind`` on ``branch_name``."""
    await add_tags(branches=[branch_name])

    client = get_client()

    # NOTE we only need the id of the nodes, this query will still query for the HFID
    node_schema = registry.schema.get_node_schema(name=kind, branch=branch_name)
    excluded_fields = node_schema.attribute_names + node_schema.relationship_names
    nodes = await client.all(
        kind=kind,
        branch=branch_name,
        exclude=excluded_fields,
        populate_store=False,
    )

    workflow_service = get_workflow()
    for node in nodes:
        # NOTE(review): context is passed both as the submit kwarg and inside
        # parameters — confirm whether HFID_PROCESS requires the duplication.
        await workflow_service.submit_workflow(
            workflow=HFID_PROCESS,
            context=context,
            parameters={
                "branch_name": branch_name,
                "node_kind": kind,
                "target_kind": kind,
                "object_id": node.id,
                "context": context,
            },
        )
@@ -0,0 +1,22 @@
1
+ from infrahub.events.branch_action import BranchDeletedEvent
2
+ from infrahub.events.schema_action import SchemaUpdatedEvent
3
+ from infrahub.trigger.models import BuiltinTriggerDefinition, EventTrigger, ExecuteWorkflow
4
+ from infrahub.workflows.catalogue import HFID_SETUP
5
+
6
# Built-in trigger: re-run the HFID setup flow whenever the schema is updated
# or a branch is deleted.
TRIGGER_HFID_ALL_SCHEMA = BuiltinTriggerDefinition(
    name="hfid-setup-all",
    trigger=EventTrigger(events={SchemaUpdatedEvent.event_name, BranchDeletedEvent.event_name}),
    actions=[
        ExecuteWorkflow(
            workflow=HFID_SETUP,
            parameters={
                "branch_name": "{{ event.resource['infrahub.branch.name'] }}",
                "event_name": "{{ event.event }}",
                # The context payload is rendered to JSON through nested
                # Prefect template kinds (jinja inside json).
                "context": {
                    "__prefect_kind": "json",
                    "value": {"__prefect_kind": "jinja", "template": "{{ event.payload['context'] | tojson }}"},
                },
            },
        ),
    ],
)
infrahub/lock.py CHANGED
@@ -5,6 +5,7 @@ import time
5
5
  import uuid
6
6
  from asyncio import Lock as LocalLock
7
7
  from asyncio import sleep
8
+ from contextvars import ContextVar
8
9
  from typing import TYPE_CHECKING
9
10
 
10
11
  import redis.asyncio as redis
@@ -50,9 +51,12 @@ GLOBAL_GRAPH_LOCK = "global.graph"
50
51
  class InfrahubMultiLock:
51
52
  """Context manager to allow multiple locks to be reserved together"""
52
53
 
53
- def __init__(self, lock_registry: InfrahubLockRegistry, locks: list[str] | None = None) -> None:
54
+ def __init__(
55
+ self, lock_registry: InfrahubLockRegistry, locks: list[str] | None = None, metrics: bool = True
56
+ ) -> None:
54
57
  self.registry = lock_registry
55
58
  self.locks = locks or []
59
+ self.metrics = metrics
56
60
 
57
61
  async def __aenter__(self):
58
62
  await self.acquire()
@@ -67,11 +71,11 @@ class InfrahubMultiLock:
67
71
 
68
72
  async def acquire(self) -> None:
69
73
  for lock in self.locks:
70
- await self.registry.get(name=lock).acquire()
74
+ await self.registry.get(name=lock, metrics=self.metrics).acquire()
71
75
 
72
76
  async def release(self) -> None:
73
77
  for lock in reversed(self.locks):
74
- await self.registry.get(name=lock).release()
78
+ await self.registry.get(name=lock, metrics=self.metrics).release()
75
79
 
76
80
 
77
81
  class NATSLock:
@@ -98,10 +102,10 @@ class NATSLock:
98
102
  while True:
99
103
  if await self.do_acquire(token):
100
104
  self.token = token
101
- return True
105
+ return
102
106
  await sleep(0.1) # default Redis GlobalLock value
103
107
 
104
- async def do_acquire(self, token: str) -> bool:
108
+ async def do_acquire(self, token: str) -> bool | None:
105
109
  return await self.service.cache.set(key=self.name, value=token, not_exists=True)
106
110
 
107
111
  async def release(self) -> None:
@@ -123,16 +127,19 @@ class InfrahubLock:
123
127
  connection: redis.Redis | InfrahubServices | None = None,
124
128
  local: bool | None = None,
125
129
  in_multi: bool = False,
130
+ metrics: bool = True,
126
131
  ) -> None:
127
- self.use_local: bool = local
132
+ self.use_local: bool | None = local
128
133
  self.local: LocalLock = None
129
134
  self.remote: GlobalLock = None
130
135
  self.name: str = name
131
136
  self.connection: redis.Redis | None = connection
132
137
  self.in_multi: bool = in_multi
133
138
  self.lock_type: str = "multi" if self.in_multi else "individual"
134
- self.acquire_time: int | None = None
139
+ self._acquire_time: int | None = None
135
140
  self.event = asyncio.Event()
141
+ self._recursion_var: ContextVar[int | None] = ContextVar(f"infrahub_lock_recursion_{self.name}", default=None)
142
+ self.metrics = metrics
136
143
 
137
144
  if not self.connection or (self.use_local is None and name.startswith("local.")):
138
145
  self.use_local = True
@@ -144,6 +151,17 @@ class InfrahubLock:
144
151
  else:
145
152
  self.remote = NATSLock(service=self.connection, name=f"{LOCK_PREFIX}.{self.name}")
146
153
 
154
+ @property
155
+ def acquire_time(self) -> int:
156
+ if self._acquire_time is not None:
157
+ return self._acquire_time
158
+
159
+ raise ValueError("The lock has not been initialized")
160
+
161
+ @acquire_time.setter
162
+ def acquire_time(self, value: int) -> None:
163
+ self._acquire_time = value
164
+
147
165
  async def __aenter__(self):
148
166
  await self.acquire()
149
167
 
@@ -156,21 +174,47 @@ class InfrahubLock:
156
174
  await self.release()
157
175
 
158
176
  async def acquire(self) -> None:
159
- with LOCK_ACQUIRE_TIME_METRICS.labels(self.name, self.lock_type).time():
160
- if not self.use_local:
161
- await self.remote.acquire(token=f"{current_timestamp()}::{WORKER_IDENTITY}")
162
- else:
163
- await self.local.acquire()
177
+ depth = self._recursion_var.get()
178
+ if depth is not None:
179
+ self._recursion_var.set(depth + 1)
180
+ return
181
+
182
+ if self.metrics:
183
+ with LOCK_ACQUIRE_TIME_METRICS.labels(self.name, self.lock_type).time():
184
+ if not self.use_local:
185
+ await self.remote.acquire(token=f"{current_timestamp()}::{WORKER_IDENTITY}")
186
+ else:
187
+ await self.local.acquire()
188
+ elif not self.use_local:
189
+ await self.remote.acquire(token=f"{current_timestamp()}::{WORKER_IDENTITY}")
190
+ else:
191
+ await self.local.acquire()
192
+
164
193
  self.acquire_time = time.time_ns()
165
194
  self.event.clear()
195
+ self._recursion_var.set(1)
166
196
 
167
197
  async def release(self) -> None:
168
- duration_ns = time.time_ns() - self.acquire_time
169
- LOCK_RESERVE_TIME_METRICS.labels(self.name, self.lock_type).observe(duration_ns / 1000000000)
198
+ depth = self._recursion_var.get()
199
+ if depth is None:
200
+ raise RuntimeError("Lock release attempted without ownership context.")
201
+
202
+ if depth > 1:
203
+ self._recursion_var.set(depth - 1)
204
+ return
205
+
206
+ if self.acquire_time is not None:
207
+ duration_ns = time.time_ns() - self.acquire_time
208
+ if self.metrics:
209
+ LOCK_RESERVE_TIME_METRICS.labels(self.name, self.lock_type).observe(duration_ns / 1000000000)
210
+ self.acquire_time = None
211
+
170
212
  if not self.use_local:
171
213
  await self.remote.release()
172
214
  else:
173
215
  self.local.release()
216
+
217
+ self._recursion_var.set(None)
174
218
  self.event.set()
175
219
 
176
220
  async def locked(self) -> bool:
@@ -261,11 +305,18 @@ class InfrahubLockRegistry:
261
305
  return self.locks[lock_name]
262
306
 
263
307
  def get(
264
- self, name: str, namespace: str | None = None, local: bool | None = None, in_multi: bool = False
308
+ self,
309
+ name: str,
310
+ namespace: str | None = None,
311
+ local: bool | None = None,
312
+ in_multi: bool = False,
313
+ metrics: bool = True,
265
314
  ) -> InfrahubLock:
266
315
  lock_name = self.name_generator.generate_name(name=name, namespace=namespace, local=local)
267
316
  if lock_name not in self.locks:
268
- self.locks[lock_name] = InfrahubLock(name=lock_name, connection=self.connection, in_multi=in_multi)
317
+ self.locks[lock_name] = InfrahubLock(
318
+ name=lock_name, connection=self.connection, in_multi=in_multi, metrics=metrics
319
+ )
269
320
  return self.locks[lock_name]
270
321
 
271
322
  def local_schema_lock(self) -> LocalLock:
@@ -89,7 +89,8 @@ class ProposedChangeArtifactDefinition(BaseModel):
89
89
  definition_id: str
90
90
  definition_name: str
91
91
  artifact_name: str
92
- query_name: str
92
+ query_name: str # Deprecated
93
+ query_id: str
93
94
  query_models: list[str]
94
95
  repository_id: str
95
96
  transform_kind: str
infrahub/middleware.py CHANGED
@@ -1,7 +1,8 @@
1
1
  from typing import Any
2
2
 
3
+ from fastapi.middleware.gzip import GZipMiddleware
3
4
  from starlette.middleware.cors import CORSMiddleware
4
- from starlette.types import ASGIApp
5
+ from starlette.types import ASGIApp, Receive, Scope, Send
5
6
 
6
7
  from infrahub import config
7
8
 
@@ -15,3 +16,27 @@ class InfrahubCORSMiddleware(CORSMiddleware):
15
16
  kwargs["allow_headers"] = config.SETTINGS.api.cors_allow_headers
16
17
 
17
18
  super().__init__(app, *args, **kwargs)
19
+
20
+
21
class ConditionalGZipMiddleware(GZipMiddleware):
    """GZip middleware that only compresses responses for configured path prefixes."""

    def __init__(
        self,
        app: ASGIApp,
        *,
        minimum_size: int = 500,
        compresslevel: int = 9,
        include_paths: tuple[str, ...] = (),
    ) -> None:
        super().__init__(app, minimum_size=minimum_size, compresslevel=compresslevel)
        # Only requests whose path starts with one of these prefixes are compressed.
        self.include_paths = include_paths

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:  # type: ignore[override]
        # Non-HTTP traffic (e.g. websockets, lifespan) is passed through untouched.
        if scope["type"] != "http":
            await self.app(scope, receive, send)
            return

        path = scope.get("path", "")
        matched = any(path.startswith(prefix) for prefix in self.include_paths)
        if matched:
            # Delegate to GZipMiddleware for the actual compression.
            await super().__call__(scope, receive, send)
        else:
            await self.app(scope, receive, send)
@@ -30,6 +30,7 @@ GLOBAL_PERMISSION_DENIAL_MESSAGE = {
30
30
  GlobalPermissions.MANAGE_ACCOUNTS.value: "You are not allowed to manage user accounts, groups or roles",
31
31
  GlobalPermissions.MANAGE_PERMISSIONS.value: "You are not allowed to manage permissions",
32
32
  GlobalPermissions.MANAGE_REPOSITORIES.value: "You are not allowed to manage repositories",
33
+ GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL.value: "You are not allowed to update human friendly IDs and display labels ad hoc",
33
34
  }
34
35
 
35
36
  GLOBAL_PERMISSION_DESCRIPTION = {
@@ -42,4 +43,5 @@ GLOBAL_PERMISSION_DESCRIPTION = {
42
43
  GlobalPermissions.MANAGE_PERMISSIONS: "Allow a user to manage permissions",
43
44
  GlobalPermissions.MANAGE_REPOSITORIES: "Allow a user to manage repositories",
44
45
  GlobalPermissions.SUPER_ADMIN: "Allow a user to do anything",
46
+ GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL: "Allow a user to update objects' display labels and human friendly IDs ad hoc",
45
47
  }
@@ -8,7 +8,7 @@ from pathlib import Path
8
8
  from typing import TYPE_CHECKING
9
9
 
10
10
  import pytest
11
- from infrahub_sdk.exceptions import ModuleImportError
11
+ from infrahub_sdk.exceptions import ModuleImportError, NodeNotFoundError, URLNotFoundError
12
12
  from infrahub_sdk.node import InfrahubNode
13
13
  from infrahub_sdk.protocols import (
14
14
  CoreArtifactValidator,
@@ -44,7 +44,7 @@ from infrahub.core.diff.model.diff import DiffElementType, SchemaConflict
44
44
  from infrahub.core.diff.model.path import NodeDiffFieldSummary
45
45
  from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
46
46
  from infrahub.core.manager import NodeManager
47
- from infrahub.core.protocols import CoreDataCheck, CoreValidator
47
+ from infrahub.core.protocols import CoreArtifactDefinition, CoreDataCheck, CoreValidator
48
48
  from infrahub.core.protocols import CoreProposedChange as InternalCoreProposedChange
49
49
  from infrahub.core.timestamp import Timestamp
50
50
  from infrahub.core.validators.checks_runner import run_checks_and_update_validator
@@ -58,6 +58,7 @@ from infrahub.generators.models import ProposedChangeGeneratorDefinition
58
58
  from infrahub.git.base import extract_repo_file_information
59
59
  from infrahub.git.models import TriggerRepositoryInternalChecks, TriggerRepositoryUserChecks
60
60
  from infrahub.git.repository import InfrahubRepository, get_initialized_repo
61
+ from infrahub.git.utils import fetch_artifact_definition_targets, fetch_proposed_change_generator_definition_targets
61
62
  from infrahub.log import get_logger
62
63
  from infrahub.message_bus.types import (
63
64
  ProposedChangeArtifactDefinition,
@@ -307,6 +308,7 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
307
308
  populate_store=True,
308
309
  branch=model.source_branch,
309
310
  )
311
+
310
312
  generator_definitions = [
311
313
  ProposedChangeGeneratorDefinition(
312
314
  definition_id=generator.id,
@@ -319,8 +321,11 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
319
321
  parameters=generator.parameters.value,
320
322
  group_id=generator.targets.peer.id,
321
323
  convert_query_response=generator.convert_query_response.value,
324
+ execute_in_proposed_change=generator.execute_in_proposed_change.value,
325
+ execute_after_merge=generator.execute_after_merge.value,
322
326
  )
323
327
  for generator in generators
328
+ if generator.execute_in_proposed_change.value
324
329
  ]
325
330
 
326
331
  diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id)
@@ -524,7 +529,11 @@ async def run_proposed_change_user_tests(model: RequestProposedChangeUserTests)
524
529
  log = get_run_logger()
525
530
  client = get_client()
526
531
 
527
- proposed_change = await client.get(kind=InfrahubKind.PROPOSEDCHANGE, id=model.proposed_change)
532
+ try:
533
+ proposed_change = await client.get(kind=CoreProposedChange, id=model.proposed_change)
534
+ except NodeNotFoundError:
535
+ log.warning(f"Proposed change ({model.proposed_change}) not found, skipping user tests execution")
536
+ return
528
537
 
529
538
  def _execute(
530
539
  directory: Path, repository: ProposedChangeRepository, proposed_change: InfrahubNode
@@ -612,7 +621,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
612
621
  client = get_client()
613
622
 
614
623
  artifact_definition = await client.get(
615
- kind=InfrahubKind.ARTIFACTDEFINITION,
624
+ kind=CoreArtifactDefinition,
616
625
  id=model.artifact_definition.definition_id,
617
626
  branch=model.source_branch,
618
627
  )
@@ -652,9 +661,9 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
652
661
  branch=model.source_branch,
653
662
  )
654
663
 
655
- await artifact_definition.targets.fetch()
656
- group = artifact_definition.targets.peer
657
- await group.members.fetch()
664
+ group = await fetch_artifact_definition_targets(
665
+ client=client, branch=model.source_branch, definition=artifact_definition
666
+ )
658
667
 
659
668
  artifacts_by_member = {}
660
669
  for artifact in existing_artifacts:
@@ -691,6 +700,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
691
700
  repository_kind=repository.kind,
692
701
  branch_name=model.source_branch,
693
702
  query=model.artifact_definition.query_name,
703
+ query_id=model.artifact_definition.query_id,
694
704
  variables=await member.extract(params=artifact_definition.parameters.value),
695
705
  target_id=member.id,
696
706
  target_kind=member.get_kind(),
@@ -760,6 +770,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
760
770
  query=model.generator_definition.query_name,
761
771
  targets=model.generator_definition.group_id,
762
772
  convert_query_response=model.generator_definition.convert_query_response,
773
+ execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
774
+ execute_after_merge=model.generator_definition.execute_after_merge,
763
775
  )
764
776
 
765
777
  commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -786,6 +798,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
786
798
  params=model.variables,
787
799
  generator_instance=generator_instance.id,
788
800
  convert_query_response=generator_definition.convert_query_response,
801
+ execute_after_merge=generator_definition.execute_after_merge,
802
+ execute_in_proposed_change=generator_definition.execute_in_proposed_change,
789
803
  infrahub_node=InfrahubNode,
790
804
  )
791
805
  generator._init_client.request_context = context.to_request_context()
@@ -917,14 +931,9 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
917
931
  branch=model.source_branch,
918
932
  )
919
933
 
920
- group = await client.get(
921
- kind=InfrahubKind.GENERICGROUP,
922
- prefetch_relationships=True,
923
- populate_store=True,
924
- id=model.generator_definition.group_id,
925
- branch=model.source_branch,
934
+ group = await fetch_proposed_change_generator_definition_targets(
935
+ client=client, branch=model.source_branch, definition=model.generator_definition
926
936
  )
927
- await group.members.fetch()
928
937
 
929
938
  instance_by_member = {}
930
939
  for instance in existing_instances:
@@ -934,7 +943,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
934
943
  requested_instances = 0
935
944
  impacted_instances = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE)
936
945
 
937
- check_generator_run_models = []
946
+ check_generator_run_models: list[RunGeneratorAsCheckModel] = []
938
947
  for relationship in group.members.peers:
939
948
  member = relationship.peer
940
949
  generator_instance = instance_by_member.get(member.id)
@@ -970,6 +979,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
970
979
  context=context,
971
980
  )
972
981
  for check_generator_run_model in check_generator_run_models
982
+ if check_generator_run_model.generator_definition.execute_in_proposed_change
973
983
  ]
974
984
 
975
985
  await run_checks_and_update_validator(
@@ -1245,6 +1255,7 @@ query GatherArtifactDefinitions {
1245
1255
  }
1246
1256
  query {
1247
1257
  node {
1258
+ id
1248
1259
  models {
1249
1260
  value
1250
1261
  }
@@ -1466,6 +1477,7 @@ def _parse_artifact_definitions(definitions: list[dict]) -> list[ProposedChangeA
1466
1477
  content_type=definition["node"]["content_type"]["value"],
1467
1478
  timeout=definition["node"]["transformation"]["node"]["timeout"]["value"],
1468
1479
  query_name=definition["node"]["transformation"]["node"]["query"]["node"]["name"]["value"],
1480
+ query_id=definition["node"]["transformation"]["node"]["query"]["node"]["id"],
1469
1481
  query_models=definition["node"]["transformation"]["node"]["query"]["node"]["models"]["value"] or [],
1470
1482
  repository_id=definition["node"]["transformation"]["node"]["repository"]["node"]["id"],
1471
1483
  transform_kind=definition["node"]["transformation"]["node"]["__typename"],
@@ -1490,8 +1502,14 @@ async def _get_proposed_change_repositories(
1490
1502
  destination_all = await client.execute_graphql(
1491
1503
  query=DESTINATION_ALLREPOSITORIES, branch_name=model.destination_branch
1492
1504
  )
1493
- source_managed = await client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
1494
- source_readonly = await client.execute_graphql(query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch)
1505
+ try:
1506
+ source_managed = await client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
1507
+ source_readonly = await client.execute_graphql(
1508
+ query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch
1509
+ )
1510
+ except URLNotFoundError:
1511
+ # If the URL is not found it means that the source branch has been deleted after the proposed change was created
1512
+ return []
1495
1513
 
1496
1514
  destination_all = destination_all[InfrahubKind.GENERICREPOSITORY]["edges"]
1497
1515
  source_all = (
infrahub/server.py CHANGED
@@ -10,7 +10,6 @@ from asgi_correlation_id import CorrelationIdMiddleware
10
10
  from asgi_correlation_id.context import correlation_id
11
11
  from fastapi import FastAPI, Request, Response
12
12
  from fastapi.logger import logger
13
- from fastapi.middleware.gzip import GZipMiddleware
14
13
  from fastapi.responses import RedirectResponse
15
14
  from fastapi.staticfiles import StaticFiles
16
15
  from fastapi.templating import Jinja2Templates
@@ -25,12 +24,13 @@ from infrahub.api.exception_handlers import generic_api_exception_handler
25
24
  from infrahub.components import ComponentType
26
25
  from infrahub.constants.environment import INSTALLATION_TYPE
27
26
  from infrahub.core.initialization import initialization
27
+ from infrahub.database.graph import validate_graph_version
28
28
  from infrahub.dependencies.registry import build_component_registry
29
29
  from infrahub.exceptions import Error, ValidationError
30
30
  from infrahub.graphql.api.endpoints import router as graphql_router
31
31
  from infrahub.lock import initialize_lock
32
32
  from infrahub.log import clear_log_context, get_logger, set_log_data
33
- from infrahub.middleware import InfrahubCORSMiddleware
33
+ from infrahub.middleware import ConditionalGZipMiddleware, InfrahubCORSMiddleware
34
34
  from infrahub.services import InfrahubServices
35
35
  from infrahub.trace import add_span_exception, configure_trace, get_traceid
36
36
  from infrahub.worker import WORKER_IDENTITY
@@ -84,10 +84,17 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True
84
84
  initialize_lock(service=service)
85
85
  # We must initialize DB after initialize lock and initialize lock depends on cache initialization
86
86
  async with application.state.db.start_session() as db:
87
- await initialization(db=db, add_database_indexes=True)
87
+ is_initial_setup = await initialization(db=db, add_database_indexes=True)
88
+
89
+ async with database.start_session() as dbs:
90
+ await validate_graph_version(db=dbs)
91
+
92
+ # Initialize the workflow after the registry has been setup
93
+ await service.initialize_workflow(is_initial_setup=is_initial_setup)
88
94
 
89
95
  application.state.service = service
90
96
  application.state.response_delay = config.SETTINGS.miscellaneous.response_delay
97
+
91
98
  if enable_scheduler:
92
99
  await service.scheduler.start_schedule()
93
100
 
@@ -184,7 +191,17 @@ app.add_middleware(
184
191
  skip_paths=["/health"],
185
192
  )
186
193
  app.add_middleware(InfrahubCORSMiddleware)
187
- app.add_middleware(GZipMiddleware, minimum_size=100_000)
194
+ app.add_middleware(
195
+ ConditionalGZipMiddleware,
196
+ minimum_size=100_000,
197
+ compresslevel=1,
198
+ include_paths=(
199
+ "/assets",
200
+ "/favicons",
201
+ "/docs",
202
+ "/api/schema",
203
+ ),
204
+ )
188
205
 
189
206
  app.add_exception_handler(Error, generic_api_exception_handler)
190
207
  app.add_exception_handler(TimestampFormatError, partial(generic_api_exception_handler, http_code=400))