infrahub-server 1.4.9__py3-none-any.whl → 1.5.0b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. infrahub/actions/tasks.py +200 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/query.py +2 -0
  4. infrahub/api/schema.py +3 -0
  5. infrahub/auth.py +5 -5
  6. infrahub/cli/db.py +2 -2
  7. infrahub/config.py +7 -2
  8. infrahub/core/attribute.py +22 -19
  9. infrahub/core/branch/models.py +2 -2
  10. infrahub/core/branch/needs_rebase_status.py +11 -0
  11. infrahub/core/branch/tasks.py +2 -2
  12. infrahub/core/constants/__init__.py +1 -0
  13. infrahub/core/convert_object_type/object_conversion.py +201 -0
  14. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  15. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  16. infrahub/core/diff/query/artifact.py +12 -9
  17. infrahub/core/graph/__init__.py +1 -1
  18. infrahub/core/initialization.py +2 -2
  19. infrahub/core/manager.py +3 -81
  20. infrahub/core/migrations/graph/__init__.py +2 -0
  21. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
  22. infrahub/core/node/__init__.py +26 -3
  23. infrahub/core/node/create.py +79 -38
  24. infrahub/core/node/lock_utils.py +98 -0
  25. infrahub/core/property.py +11 -0
  26. infrahub/core/protocols.py +1 -0
  27. infrahub/core/query/attribute.py +27 -15
  28. infrahub/core/query/node.py +47 -184
  29. infrahub/core/query/relationship.py +43 -26
  30. infrahub/core/query/subquery.py +0 -8
  31. infrahub/core/relationship/model.py +59 -19
  32. infrahub/core/schema/attribute_schema.py +0 -2
  33. infrahub/core/schema/definitions/core/repository.py +7 -0
  34. infrahub/core/schema/relationship_schema.py +0 -1
  35. infrahub/core/schema/schema_branch.py +3 -2
  36. infrahub/generators/models.py +31 -12
  37. infrahub/generators/tasks.py +3 -1
  38. infrahub/git/base.py +38 -1
  39. infrahub/graphql/api/dependencies.py +2 -4
  40. infrahub/graphql/api/endpoints.py +2 -2
  41. infrahub/graphql/app.py +2 -4
  42. infrahub/graphql/initialization.py +2 -3
  43. infrahub/graphql/manager.py +212 -137
  44. infrahub/graphql/middleware.py +12 -0
  45. infrahub/graphql/mutations/branch.py +11 -0
  46. infrahub/graphql/mutations/computed_attribute.py +110 -3
  47. infrahub/graphql/mutations/convert_object_type.py +34 -13
  48. infrahub/graphql/mutations/ipam.py +21 -8
  49. infrahub/graphql/mutations/main.py +37 -153
  50. infrahub/graphql/mutations/profile.py +195 -0
  51. infrahub/graphql/mutations/proposed_change.py +2 -1
  52. infrahub/graphql/mutations/repository.py +22 -83
  53. infrahub/graphql/mutations/webhook.py +1 -1
  54. infrahub/graphql/registry.py +173 -0
  55. infrahub/graphql/schema.py +4 -1
  56. infrahub/lock.py +52 -26
  57. infrahub/locks/__init__.py +0 -0
  58. infrahub/locks/tasks.py +37 -0
  59. infrahub/patch/plan_writer.py +2 -2
  60. infrahub/profiles/__init__.py +0 -0
  61. infrahub/profiles/node_applier.py +101 -0
  62. infrahub/profiles/queries/__init__.py +0 -0
  63. infrahub/profiles/queries/get_profile_data.py +99 -0
  64. infrahub/profiles/tasks.py +63 -0
  65. infrahub/repositories/__init__.py +0 -0
  66. infrahub/repositories/create_repository.py +113 -0
  67. infrahub/tasks/registry.py +6 -4
  68. infrahub/webhook/models.py +1 -1
  69. infrahub/workflows/catalogue.py +38 -3
  70. infrahub/workflows/models.py +17 -2
  71. infrahub_sdk/branch.py +5 -8
  72. infrahub_sdk/client.py +364 -84
  73. infrahub_sdk/convert_object_type.py +61 -0
  74. infrahub_sdk/ctl/check.py +2 -3
  75. infrahub_sdk/ctl/cli_commands.py +16 -12
  76. infrahub_sdk/ctl/config.py +8 -2
  77. infrahub_sdk/ctl/generator.py +2 -3
  78. infrahub_sdk/ctl/repository.py +39 -1
  79. infrahub_sdk/ctl/schema.py +12 -1
  80. infrahub_sdk/ctl/utils.py +4 -0
  81. infrahub_sdk/ctl/validate.py +5 -3
  82. infrahub_sdk/diff.py +4 -5
  83. infrahub_sdk/exceptions.py +2 -0
  84. infrahub_sdk/graphql.py +7 -2
  85. infrahub_sdk/node/attribute.py +2 -0
  86. infrahub_sdk/node/node.py +28 -20
  87. infrahub_sdk/playback.py +1 -2
  88. infrahub_sdk/protocols.py +40 -6
  89. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  90. infrahub_sdk/pytest_plugin/utils.py +40 -0
  91. infrahub_sdk/repository.py +1 -2
  92. infrahub_sdk/schema/main.py +1 -0
  93. infrahub_sdk/spec/object.py +43 -4
  94. infrahub_sdk/spec/range_expansion.py +118 -0
  95. infrahub_sdk/timestamp.py +18 -6
  96. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/METADATA +20 -24
  97. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/RECORD +102 -84
  98. infrahub_testcontainers/models.py +2 -2
  99. infrahub_testcontainers/performance_test.py +4 -4
  100. infrahub/core/convert_object_type/conversion.py +0 -134
  101. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/LICENSE.txt +0 -0
  102. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/WHEEL +0 -0
  103. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/entry_points.txt +0 -0
infrahub/lock.py CHANGED
@@ -25,6 +25,7 @@ registry: InfrahubLockRegistry = None
 
 
 METRIC_PREFIX = "infrahub_lock"
+LOCK_PREFIX = "lock"
 
 LOCK_ACQUIRE_TIME_METRICS = Histogram(
     f"{METRIC_PREFIX}_acquire_seconds",
@@ -139,9 +140,9 @@ class InfrahubLock:
         if self.use_local:
             self.local = LocalLock()
         elif config.SETTINGS.cache.driver == config.CacheDriver.Redis:
-            self.remote = GlobalLock(redis=self.connection, name=f"lock.{self.name}")
+            self.remote = GlobalLock(redis=self.connection, name=f"{LOCK_PREFIX}.{self.name}")
         else:
-            self.remote = NATSLock(service=self.connection, name=f"lock.{self.name}")
+            self.remote = NATSLock(service=self.connection, name=f"{LOCK_PREFIX}.{self.name}")
 
     async def __aenter__(self):
         await self.acquire()
@@ -179,9 +180,54 @@ class InfrahubLock:
         return self.local.locked()
 
 
+class LockNameGenerator:
+    local = "local"
+    _global = "global"
+
+    def generate_name(self, name: str, namespace: str | None = None, local: bool | None = None) -> str:
+        if namespace is None and local is None:
+            return name
+
+        new_name = ""
+        if local is True:
+            new_name = f"{self.local}."
+        elif local is False:
+            new_name = f"{self._global}."
+
+        if namespace is not None:
+            new_name += f"{namespace}."
+        new_name += name
+
+        return new_name
+
+    def unpack_name(self, name: str) -> tuple[str, str | None, bool | None]:
+        local = None
+        namespace = None
+
+        parts = name.split(".")
+        if parts[0] == self.local:
+            local = True
+            parts = parts[1:]
+        elif parts[0] == self._global:
+            local = False
+            parts = parts[1:]
+
+        if len(parts) > 1:
+            namespace = parts[0]
+            original_name = ".".join(parts[1:])
+        else:
+            original_name = parts[0]
+
+        return original_name, namespace, local
+
+
 class InfrahubLockRegistry:
     def __init__(
-        self, token: str | None = None, local_only: bool = False, service: InfrahubServices | None = None
+        self,
+        token: str | None = None,
+        local_only: bool = False,
+        service: InfrahubServices | None = None,
+        name_generator: LockNameGenerator | None = None,
     ) -> None:
         if config.SETTINGS.cache.enable and not local_only:
             if config.SETTINGS.cache.driver == config.CacheDriver.Redis:
@@ -201,23 +247,7 @@
 
         self.token = token or str(uuid.uuid4())
         self.locks: dict[str, InfrahubLock] = {}
-
-    @classmethod
-    def _generate_name(cls, name: str, namespace: str | None = None, local: bool | None = None) -> str:
-        if namespace is None and local is None:
-            return name
-
-        new_name = ""
-        if local is True:
-            new_name = "local."
-        elif local is False:
-            new_name = "global."
-
-        if namespace is not None:
-            new_name += f"{namespace}."
-        new_name += name
-
-        return new_name
+        self.name_generator = name_generator or LockNameGenerator()
 
     def get_existing(
         self,
@@ -225,7 +255,7 @@
         namespace: str | None,
         local: bool | None = None,
     ) -> InfrahubLock | None:
-        lock_name = self._generate_name(name=name, namespace=namespace, local=local)
+        lock_name = self.name_generator.generate_name(name=name, namespace=namespace, local=local)
        if lock_name not in self.locks:
            return None
        return self.locks[lock_name]
@@ -233,7 +263,7 @@
     def get(
         self, name: str, namespace: str | None = None, local: bool | None = None, in_multi: bool = False
     ) -> InfrahubLock:
-        lock_name = self._generate_name(name=name, namespace=namespace, local=local)
+        lock_name = self.name_generator.generate_name(name=name, namespace=namespace, local=local)
         if lock_name not in self.locks:
             self.locks[lock_name] = InfrahubLock(name=lock_name, connection=self.connection, in_multi=in_multi)
         return self.locks[lock_name]
@@ -257,7 +287,3 @@
 def initialize_lock(local_only: bool = False, service: InfrahubServices | None = None) -> None:
     global registry
     registry = InfrahubLockRegistry(local_only=local_only, service=service)
-
-
-def build_object_lock_name(name: str) -> str:
-    return f"global.object.{name}"
infrahub/locks/__init__.py ADDED
File without changes
infrahub/locks/tasks.py ADDED
@@ -0,0 +1,37 @@
+from __future__ import annotations
+
+from prefect import flow
+from prefect.logging import get_run_logger
+
+from infrahub import config
+from infrahub.core.registry import registry
+from infrahub.core.timestamp import Timestamp
+from infrahub.lock import LOCK_PREFIX
+from infrahub.services import InfrahubServices  # noqa: TC001 needed for prefect flow
+
+
+@flow(
+    name="clean-up-deadlocks",
+    flow_run_name="Clean up deadlocks",
+)
+async def clean_up_deadlocks(service: InfrahubServices) -> None:
+    """Remove stale distributed locks left behind by inactive workers"""
+    keys = await service.cache.list_keys(filter_pattern=f"{LOCK_PREFIX}*")
+    if not keys:
+        return
+
+    log = get_run_logger()
+    values = await service.cache.get_values(keys=keys)
+    workers = await service.component.list_workers(branch=registry.default_branch, schema_hash=False)
+    workers_active = {worker.id for worker in workers if worker.active}
+
+    for key, value in zip(keys, values, strict=False):
+        if not key or not value:
+            continue
+
+        timestamp, worker_id = value.split("::", 1)
+        if worker_id not in workers_active and Timestamp() > Timestamp(timestamp).add(
+            minutes=config.SETTINGS.cache.clean_up_deadlocks_interval_mins
+        ):
+            await service.cache.delete(key)
+            log.info(f"Deleted deadlock key={key} worker={worker_id}")
infrahub/patch/plan_writer.py CHANGED
@@ -1,6 +1,6 @@
 import json
 from dataclasses import asdict
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any
 
@@ -10,7 +10,7 @@ from .models import EdgeToAdd, EdgeToDelete, EdgeToUpdate, PatchPlan, VertexToAd
 
 class PatchPlanWriter:
     def write(self, patches_directory: Path, patch_plan: PatchPlan) -> Path:
-        timestamp_str = datetime.now(tz=timezone.utc).strftime("%Y%m%d-%H%M%S")
+        timestamp_str = datetime.now(tz=UTC).strftime("%Y%m%d-%H%M%S")
         patch_name = f"patch-{patch_plan.name}-{timestamp_str}"
         patch_plan_directory = patches_directory / Path(patch_name)
         if not patch_plan_directory.exists():
infrahub/profiles/__init__.py ADDED
File without changes
infrahub/profiles/node_applier.py ADDED
@@ -0,0 +1,101 @@
+from typing import Any
+
+from infrahub.core.attribute import BaseAttribute
+from infrahub.core.branch import Branch
+from infrahub.core.node import Node
+from infrahub.database import InfrahubDatabase
+
+from .queries.get_profile_data import GetProfileDataQuery, ProfileData
+
+
+class NodeProfilesApplier:
+    def __init__(self, db: InfrahubDatabase, branch: Branch):
+        self.db = db
+        self.branch = branch
+
+    async def _get_profile_ids(self, node: Node) -> list[str]:
+        try:
+            profiles_rel = node.get_relationship("profiles")
+        except ValueError:
+            return []
+        profile_rels = await profiles_rel.get_relationships(db=self.db)
+        return [pr.peer_id for pr in profile_rels if pr.peer_id]
+
+    async def _get_attr_names_for_profiles(self, node: Node) -> list[str]:
+        node_schema = node.get_schema()
+
+        # get the names of attributes that could be affected by profile changes
+        attr_names_for_profiles: list[str] = []
+        for attr_schema in node_schema.attributes:
+            attr_name = attr_schema.name
+            node_attr: BaseAttribute = getattr(node, attr_name)
+            if node_attr.is_from_profile or node_attr.is_default:
+                attr_names_for_profiles.append(attr_name)
+        return attr_names_for_profiles
+
+    async def _get_sorted_profile_data(
+        self, profile_ids: list[str], attr_names_for_profiles: list[str]
+    ) -> list[ProfileData]:
+        if not profile_ids:
+            return []
+        query = await GetProfileDataQuery.init(
+            db=self.db, branch=self.branch, profile_ids=profile_ids, attr_names=attr_names_for_profiles
+        )
+        await query.execute(db=self.db)
+        profile_data_list = query.get_profile_data()
+        return sorted(profile_data_list, key=lambda x: (x.priority, x.uuid))
+
+    def _apply_profile_to_attribute(self, node_attr: BaseAttribute, profile_value: Any, profile_id: str) -> bool:
+        is_changed = False
+        if node_attr.value != profile_value:
+            node_attr.value = profile_value
+            is_changed = True
+        if node_attr.is_default is not False:
+            node_attr.is_default = False
+            is_changed = True
+        if node_attr.is_from_profile is not True:
+            node_attr.is_from_profile = True
+            is_changed = True
+        if node_attr.source_id != profile_id:  # type: ignore[attr-defined]
+            node_attr.set_source(value=profile_id)
+            is_changed = True
+        return is_changed
+
+    def _remove_profile_from_attribute(self, node_attr: BaseAttribute) -> None:
+        node_attr.clear_source()
+        node_attr.value = node_attr.schema.default_value
+        node_attr.is_default = True
+        node_attr.is_from_profile = False
+
+    async def apply_profiles(self, node: Node) -> list[str]:
+        profile_ids = await self._get_profile_ids(node=node)
+        attr_names_for_profiles = await self._get_attr_names_for_profiles(node=node)
+
+        if not attr_names_for_profiles:
+            return []
+
+        # get profiles priorities and attribute values on branch
+        sorted_profile_data = await self._get_sorted_profile_data(
+            profile_ids=profile_ids, attr_names_for_profiles=attr_names_for_profiles
+        )
+
+        updated_field_names = []
+        # set attribute values/is_default/is_from_profile on nodes
+        for attr_name in attr_names_for_profiles:
+            has_profile_data = False
+            node_attr = node.get_attribute(attr_name)
+            for profile_data in sorted_profile_data:
+                profile_value = profile_data.attribute_values.get(attr_name)
+                if profile_value is not None:
+                    has_profile_data = True
+                    is_changed = False
+                    is_changed = self._apply_profile_to_attribute(
+                        node_attr=node_attr, profile_value=profile_value, profile_id=profile_data.uuid
+                    )
+                    if is_changed:
+                        updated_field_names.append(attr_name)
+                    break
+            if not has_profile_data and node_attr.is_from_profile:
+                self._remove_profile_from_attribute(node_attr=node_attr)
+                updated_field_names.append(attr_name)
+        return updated_field_names
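A short usage sketch for the new applier, assuming db, branch and node were already obtained through the core APIs and that the code runs in an async context (only the NodeProfilesApplier calls are taken from the file above):

    applier = NodeProfilesApplier(db=db, branch=branch)
    updated_fields = await applier.apply_profiles(node=node)
    # apply_profiles returns the attribute names whose value / is_default /
    # is_from_profile flags changed; persist the node only when something moved.
    if updated_fields:
        await node.save(db=db)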
infrahub/profiles/queries/__init__.py ADDED
File without changes
infrahub/profiles/queries/get_profile_data.py ADDED
@@ -0,0 +1,99 @@
+from dataclasses import dataclass
+from typing import Any
+
+from infrahub.core.constants import NULL_VALUE
+from infrahub.core.query import Query, QueryType
+from infrahub.database import InfrahubDatabase
+
+
+@dataclass
+class ProfileData:
+    uuid: str
+    priority: float | int
+    attribute_values: dict[str, Any]
+
+
+class GetProfileDataQuery(Query):
+    type: QueryType = QueryType.READ
+    insert_return: bool = False
+
+    def __init__(self, *args: Any, profile_ids: list[str], attr_names: list[str], **kwargs: Any):
+        super().__init__(*args, **kwargs)
+        self.profile_ids = profile_ids
+        self.attr_names = attr_names
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string())
+        self.params.update(branch_params)
+        self.params["profile_ids"] = self.profile_ids
+        self.params["attr_names"] = self.attr_names + ["profile_priority"]
+
+        query = """
+        // --------------
+        // get the Profile nodes
+        // --------------
+        MATCH (profile:Node)
+        WHERE profile.uuid IN $profile_ids
+        // --------------
+        // make sure we only use the active ones
+        // --------------
+        CALL (profile) {
+            MATCH (profile)-[r:IS_PART_OF]->(:Root)
+            WHERE %(branch_filter)s
+            ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+            RETURN r.status = "active" AS is_active
+        }
+        WITH profile
+        WHERE is_active = TRUE
+        // --------------
+        // get the attributes that we care about
+        // --------------
+        MATCH (profile)-[:HAS_ATTRIBUTE]-(attr:Attribute)
+        WHERE attr.name IN $attr_names
+        WITH DISTINCT profile, attr
+        CALL (profile, attr) {
+            MATCH (profile)-[r:HAS_ATTRIBUTE]->(attr)
+            WHERE %(branch_filter)s
+            ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+            RETURN r.status = "active" AS is_active
+        }
+        WITH profile, attr, is_active
+        WHERE is_active = TRUE
+        // --------------
+        // get the attribute values
+        // --------------
+        MATCH (attr)-[:HAS_VALUE]->(av:AttributeValue)
+        WITH DISTINCT profile, attr, av
+        CALL (attr, av) {
+            MATCH (attr)-[r:HAS_VALUE]->(av)
+            WHERE %(branch_filter)s
+            ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+            RETURN r.status = "active" AS is_active
+        }
+        WITH profile, attr, av
+        WHERE is_active = TRUE
+        RETURN profile.uuid AS profile_uuid, attr.name AS attr_name, av.value AS attr_value
+        """ % {"branch_filter": branch_filter}
+        self.add_to_query(query)
+        self.return_labels = ["profile_uuid", "attr_name", "attr_value"]
+
+    def get_profile_data(self) -> list[ProfileData]:
+        profile_data_by_uuid: dict[str, ProfileData] = {}
+        for result in self.results:
+            profile_uuid = result.get_as_type(label="profile_uuid", return_type=str)
+            if profile_uuid not in profile_data_by_uuid:
+                profile_data_by_uuid[profile_uuid] = ProfileData(
+                    uuid=profile_uuid, priority=float("inf"), attribute_values={}
+                )
+            profile_data = profile_data_by_uuid[profile_uuid]
+            attr_name = result.get_as_type(label="attr_name", return_type=str)
+            attr_value: Any = result.get(label="attr_value")
+            if attr_value == NULL_VALUE:
+                attr_value = None
+            if attr_name == "profile_priority":
+                if attr_value is not None and not isinstance(attr_value, int):
+                    attr_value = int(attr_value)
+                profile_data.priority = attr_value
+            else:
+                profile_data.attribute_values[attr_name] = attr_value
+        return list(profile_data_by_uuid.values())
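Profile precedence is resolved by the (priority, uuid) sort key used in NodeProfilesApplier above: the lowest profile_priority wins and ties fall back to the uuid. A small illustration with made-up profiles:

    profiles = [
        ProfileData(uuid="profile-b", priority=2000, attribute_values={"mtu": 9000}),
        ProfileData(uuid="profile-a", priority=1000, attribute_values={"mtu": 1500}),
    ]
    winner = sorted(profiles, key=lambda x: (x.priority, x.uuid))[0]
    assert winner.attribute_values["mtu"] == 1500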
infrahub/profiles/tasks.py ADDED
@@ -0,0 +1,63 @@
+from __future__ import annotations
+
+from prefect import flow
+from prefect.logging import get_run_logger
+
+from infrahub.workers.dependencies import get_client, get_workflow
+from infrahub.workflows.catalogue import PROFILE_REFRESH
+from infrahub.workflows.utils import add_tags
+
+REFRESH_PROFILES_MUTATION = """
+mutation RefreshProfiles(
+    $id: String!,
+) {
+    InfrahubProfilesRefresh(
+        data: {id: $id}
+    ) {
+        ok
+    }
+}
+"""
+
+
+@flow(
+    name="object-profiles-refresh",
+    flow_run_name="Refresh profiles for {node_id}",
+)
+async def object_profiles_refresh(
+    branch_name: str,
+    node_id: str,
+) -> None:
+    log = get_run_logger()
+    client = get_client()
+
+    await add_tags(branches=[branch_name], nodes=[node_id], db_change=True)
+    await client.execute_graphql(
+        query=REFRESH_PROFILES_MUTATION,
+        variables={"id": node_id},
+        branch_name=branch_name,
+    )
+    log.info(f"Profiles refreshed for {node_id}")
+
+
+@flow(
+    name="objects-profiles-refresh-multiple",
+    flow_run_name="Refresh profiles for multiple objects",
+)
+async def objects_profiles_refresh_multiple(
+    branch_name: str,
+    node_ids: list[str],
+) -> None:
+    log = get_run_logger()
+
+    await add_tags(branches=[branch_name])
+
+    for node_id in node_ids:
+        log.info(f"Requesting profile refresh for {node_id}")
+        await get_workflow().submit_workflow(
+            workflow=PROFILE_REFRESH,
+            parameters={
+                "branch_name": branch_name,
+                "node_id": node_id,
+            },
+        )
infrahub/repositories/__init__.py ADDED
File without changes
infrahub/repositories/create_repository.py ADDED
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+from infrahub.core.constants import RepositoryInternalStatus
+from infrahub.core.constants.infrahubkind import READONLYREPOSITORY, REPOSITORY
+from infrahub.core.protocols import CoreGenericRepository, CoreReadOnlyRepository, CoreRepository
+from infrahub.exceptions import ValidationError
+from infrahub.git.models import GitRepositoryAdd, GitRepositoryAddReadOnly
+from infrahub.log import get_logger
+from infrahub.message_bus import messages
+from infrahub.message_bus.messages.git_repository_connectivity import GitRepositoryConnectivityResponse
+from infrahub.workflows.catalogue import GIT_REPOSITORY_ADD, GIT_REPOSITORY_ADD_READ_ONLY
+
+if TYPE_CHECKING:
+    from infrahub.auth import AccountSession
+    from infrahub.context import InfrahubContext
+    from infrahub.core.branch import Branch
+    from infrahub.database import InfrahubDatabase
+    from infrahub.services import InfrahubServices
+
+log = get_logger()
+
+
+class RepositoryFinalizer:
+    def __init__(
+        self,
+        account_session: AccountSession,
+        services: InfrahubServices,
+        context: InfrahubContext,
+    ) -> None:
+        self.account_session = account_session
+        self.services = services
+        self.context = context
+
+    async def post_create(
+        self,
+        obj: CoreGenericRepository,
+        branch: Branch,
+        db: InfrahubDatabase,
+        delete_on_connectivity_failure: bool = True,
+    ) -> None:
+        """
+        Method meant to be called after a repository has been created in the database.
+        It mainly checks the connectivity to the remote repository and submit the workflow to create the repository in the local filesystem.
+        """
+
+        # If the connectivity is not good, we remove the repository to allow the user to add a new one
+        if delete_on_connectivity_failure:
+            message = messages.GitRepositoryConnectivity(
+                repository_name=obj.name.value,
+                repository_location=obj.location.value,
+            )
+            response = await self.services.message_bus.rpc(
+                message=message, response_class=GitRepositoryConnectivityResponse
+            )
+
+            if response.data.success is False:
+                await obj.delete(db=db)
+                raise ValidationError(response.data.message)
+
+        # If we are in the default branch, we set the sync status to Active
+        # If we are in another branch, we set the sync status to Staging
+        if branch.is_default:
+            obj.internal_status.value = RepositoryInternalStatus.ACTIVE.value
+        else:
+            obj.internal_status.value = RepositoryInternalStatus.STAGING.value
+        await obj.save(db=db)
+
+        # Create the new repository in the filesystem.
+        log.info("create_repository", name=obj.name.value)
+        authenticated_user = None
+        if self.account_session and self.account_session.authenticated:
+            authenticated_user = self.account_session.account_id
+
+        if obj.get_kind() == READONLYREPOSITORY:
+            obj = cast(CoreReadOnlyRepository, obj)
+            model = GitRepositoryAddReadOnly(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                ref=obj.ref.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD_READ_ONLY,
+                context=self.context,
+                parameters={"model": model},
+            )
+
+        elif obj.get_kind() == REPOSITORY:
+            obj = cast(CoreRepository, obj)
+            git_repo_add_model = GitRepositoryAdd(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                default_branch_name=obj.default_branch.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD,
+                context=self.context,
+                parameters={"model": git_repo_add_model},
+            )
+        else:
+            raise ValueError(f"Unknown repository kind: {obj.get_kind()}")
infrahub/tasks/registry.py CHANGED
@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING
 from infrahub import lock
 from infrahub.core import registry
 from infrahub.core.constants import GLOBAL_BRANCH_NAME
+from infrahub.graphql.registry import registry as graphql_registry
 from infrahub.log import get_logger
 from infrahub.worker import WORKER_IDENTITY
 
@@ -20,9 +21,8 @@ def update_graphql_schema(branch: Branch, schema_branch: SchemaBranch) -> None:
     """
     Update the GraphQL schema for the given branch.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager
 
-    gqlm = GraphQLSchemaManager.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
+    gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
     gqlm.get_graphql_schema(
         include_query=True,
         include_mutation=True,
@@ -67,6 +67,9 @@ async def update_branch_registry(db: InfrahubDatabase, branch: Branch) -> None:
                 worker=WORKER_IDENTITY,
             )
             registry.branch[branch.name] = branch
+        elif existing_branch.status != branch.status:
+            log.info(f"Updating registry branch cache for {branch.name=}")
+            registry.branch[branch.name] = branch
         return
 
     log.info(
@@ -89,7 +92,6 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
     If a branch is already present with a different value for the hash
     We pull the new schema from the database and we update the registry.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager
 
     async with lock.registry.local_schema_lock():
         active_branches = await registry.branch_object.get_list(db=db)
@@ -106,7 +108,7 @@
 
         purged_branches = await registry.purge_inactive_branches(db=db, active_branches=active_branches)
        purged_branches.update(
-            GraphQLSchemaManager.purge_inactive(active_branches=[branch.name for branch in active_branches])
+            graphql_registry.purge_inactive(active_branches=[branch.name for branch in active_branches])
        )
        for branch_name in sorted(purged_branches):
            log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)
infrahub/webhook/models.py CHANGED
@@ -231,7 +231,7 @@ class TransformWebhook(Webhook):
             commit=commit,
             location=f"{self.transform_file}::{self.transform_class}",
             convert_query_response=self.convert_query_response,
-            data={"data": data, **context.model_dump()},
+            data={"data": {"data": data, **context.model_dump()}},
             client=client,
         )  # type: ignore[misc]