infrahub-server 1.4.9__py3-none-any.whl → 1.5.0b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (103)
  1. infrahub/actions/tasks.py +200 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/query.py +2 -0
  4. infrahub/api/schema.py +3 -0
  5. infrahub/auth.py +5 -5
  6. infrahub/cli/db.py +2 -2
  7. infrahub/config.py +7 -2
  8. infrahub/core/attribute.py +22 -19
  9. infrahub/core/branch/models.py +2 -2
  10. infrahub/core/branch/needs_rebase_status.py +11 -0
  11. infrahub/core/branch/tasks.py +2 -2
  12. infrahub/core/constants/__init__.py +1 -0
  13. infrahub/core/convert_object_type/object_conversion.py +201 -0
  14. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  15. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  16. infrahub/core/diff/query/artifact.py +12 -9
  17. infrahub/core/graph/__init__.py +1 -1
  18. infrahub/core/initialization.py +2 -2
  19. infrahub/core/manager.py +3 -81
  20. infrahub/core/migrations/graph/__init__.py +2 -0
  21. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
  22. infrahub/core/node/__init__.py +26 -3
  23. infrahub/core/node/create.py +79 -38
  24. infrahub/core/node/lock_utils.py +98 -0
  25. infrahub/core/property.py +11 -0
  26. infrahub/core/protocols.py +1 -0
  27. infrahub/core/query/attribute.py +27 -15
  28. infrahub/core/query/node.py +47 -184
  29. infrahub/core/query/relationship.py +43 -26
  30. infrahub/core/query/subquery.py +0 -8
  31. infrahub/core/relationship/model.py +59 -19
  32. infrahub/core/schema/attribute_schema.py +0 -2
  33. infrahub/core/schema/definitions/core/repository.py +7 -0
  34. infrahub/core/schema/relationship_schema.py +0 -1
  35. infrahub/core/schema/schema_branch.py +3 -2
  36. infrahub/generators/models.py +31 -12
  37. infrahub/generators/tasks.py +3 -1
  38. infrahub/git/base.py +38 -1
  39. infrahub/graphql/api/dependencies.py +2 -4
  40. infrahub/graphql/api/endpoints.py +2 -2
  41. infrahub/graphql/app.py +2 -4
  42. infrahub/graphql/initialization.py +2 -3
  43. infrahub/graphql/manager.py +212 -137
  44. infrahub/graphql/middleware.py +12 -0
  45. infrahub/graphql/mutations/branch.py +11 -0
  46. infrahub/graphql/mutations/computed_attribute.py +110 -3
  47. infrahub/graphql/mutations/convert_object_type.py +34 -13
  48. infrahub/graphql/mutations/ipam.py +21 -8
  49. infrahub/graphql/mutations/main.py +37 -153
  50. infrahub/graphql/mutations/profile.py +195 -0
  51. infrahub/graphql/mutations/proposed_change.py +2 -1
  52. infrahub/graphql/mutations/repository.py +22 -83
  53. infrahub/graphql/mutations/webhook.py +1 -1
  54. infrahub/graphql/registry.py +173 -0
  55. infrahub/graphql/schema.py +4 -1
  56. infrahub/lock.py +52 -26
  57. infrahub/locks/__init__.py +0 -0
  58. infrahub/locks/tasks.py +37 -0
  59. infrahub/patch/plan_writer.py +2 -2
  60. infrahub/profiles/__init__.py +0 -0
  61. infrahub/profiles/node_applier.py +101 -0
  62. infrahub/profiles/queries/__init__.py +0 -0
  63. infrahub/profiles/queries/get_profile_data.py +99 -0
  64. infrahub/profiles/tasks.py +63 -0
  65. infrahub/repositories/__init__.py +0 -0
  66. infrahub/repositories/create_repository.py +113 -0
  67. infrahub/tasks/registry.py +6 -4
  68. infrahub/webhook/models.py +1 -1
  69. infrahub/workflows/catalogue.py +38 -3
  70. infrahub/workflows/models.py +17 -2
  71. infrahub_sdk/branch.py +5 -8
  72. infrahub_sdk/client.py +364 -84
  73. infrahub_sdk/convert_object_type.py +61 -0
  74. infrahub_sdk/ctl/check.py +2 -3
  75. infrahub_sdk/ctl/cli_commands.py +16 -12
  76. infrahub_sdk/ctl/config.py +8 -2
  77. infrahub_sdk/ctl/generator.py +2 -3
  78. infrahub_sdk/ctl/repository.py +39 -1
  79. infrahub_sdk/ctl/schema.py +12 -1
  80. infrahub_sdk/ctl/utils.py +4 -0
  81. infrahub_sdk/ctl/validate.py +5 -3
  82. infrahub_sdk/diff.py +4 -5
  83. infrahub_sdk/exceptions.py +2 -0
  84. infrahub_sdk/graphql.py +7 -2
  85. infrahub_sdk/node/attribute.py +2 -0
  86. infrahub_sdk/node/node.py +28 -20
  87. infrahub_sdk/playback.py +1 -2
  88. infrahub_sdk/protocols.py +40 -6
  89. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  90. infrahub_sdk/pytest_plugin/utils.py +40 -0
  91. infrahub_sdk/repository.py +1 -2
  92. infrahub_sdk/schema/main.py +1 -0
  93. infrahub_sdk/spec/object.py +43 -4
  94. infrahub_sdk/spec/range_expansion.py +118 -0
  95. infrahub_sdk/timestamp.py +18 -6
  96. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/METADATA +20 -24
  97. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/RECORD +102 -84
  98. infrahub_testcontainers/models.py +2 -2
  99. infrahub_testcontainers/performance_test.py +4 -4
  100. infrahub/core/convert_object_type/conversion.py +0 -134
  101. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/LICENSE.txt +0 -0
  102. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/WHEEL +0 -0
  103. {infrahub_server-1.4.9.dist-info → infrahub_server-1.5.0b0.dist-info}/entry_points.txt +0 -0

infrahub/graphql/mutations/computed_attribute.py

@@ -2,20 +2,28 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from graphene import Boolean, InputObjectType, Mutation, String
+from graphene import Boolean, InputObjectType, List, Mutation, NonNull, String
 
 from infrahub.core.account import ObjectPermission
 from infrahub.core.constants import ComputedAttributeKind, PermissionAction, PermissionDecision
 from infrahub.core.manager import NodeManager
+from infrahub.core.protocols import CoreTransformPython
 from infrahub.core.registry import registry
 from infrahub.database import retry_db_transaction
 from infrahub.events import EventMeta
 from infrahub.events.node_action import NodeUpdatedEvent
-from infrahub.exceptions import NodeNotFoundError, ValidationError
+from infrahub.exceptions import NodeNotFoundError, ProcessingError, ValidationError
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.types.context import ContextInput
 from infrahub.log import get_log_data
 from infrahub.worker import WORKER_IDENTITY
+from infrahub.workers.dependencies import get_workflow
+from infrahub.workflows.catalogue import (
+    COMPUTED_ATTRIBUTE_PROCESS_JINJA2,
+    COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM,
+    TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
+    TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
+)
 
 if TYPE_CHECKING:
     from graphql import GraphQLResolveInfo
@@ -89,7 +97,7 @@ class UpdateComputedAttribute(Mutation):
             raise NodeNotFoundError(
                 node_type="target_node",
                 identifier=str(data.id),
-                message="The indicated not does not have the specified attribute_name",
+                message="The indicated node does not have the specified attribute_name",
             )
         if attribute_field.value != str(data.value):
             attribute_field.value = str(data.value)
@@ -117,3 +125,102 @@ class UpdateComputedAttribute(Mutation):
         result: dict[str, Any] = {"ok": True}
 
         return cls(**result)
+
+
+class InfrahubComputedAttributeRecomputeInput(InputObjectType):
+    kind = String(required=True, description="Kind of the node to update")
+    attribute = String(required=True, description="Name of the computed attribute that must be recomputed")
+    node_ids = List(NonNull(String), description="ID of the nodes for which the attribute must be recomputed")
+
+
+class RecomputeComputedAttribute(Mutation):
+    class Arguments:
+        data = InfrahubComputedAttributeRecomputeInput(required=True)
+        context = ContextInput(required=False)
+
+    ok = Boolean()
+
+    @classmethod
+    @retry_db_transaction(name="update_computed_attribute")
+    async def mutate(
+        cls,
+        _: dict,
+        info: GraphQLResolveInfo,
+        data: InfrahubComputedAttributeRecomputeInput,
+        context: ContextInput | None = None,
+    ) -> RecomputeComputedAttribute:
+        graphql_context: GraphqlContext = info.context
+        node_schema = registry.schema.get_node_schema(
+            name=str(data.kind), branch=graphql_context.branch.name, duplicate=False
+        )
+
+        graphql_context.active_permissions.raise_for_permission(
+            permission=ObjectPermission(
+                namespace=node_schema.namespace,
+                name=node_schema.name,
+                action=PermissionAction.UPDATE.value,
+                decision=PermissionDecision.ALLOW_DEFAULT.value
+                if graphql_context.branch.name == registry.default_branch
+                else PermissionDecision.ALLOW_OTHER.value,
+            )
+        )
+        await apply_external_context(graphql_context=graphql_context, context_input=context)
+
+        attribute = node_schema.get_attribute(name=str(data.attribute))
+
+        if not attribute:
+            raise ProcessingError(
+                message=f"The indicated node does not have the specified attribute '{data.attribute}'"
+            )
+        if not attribute.computed_attribute:
+            raise ProcessingError(
+                message=f"The indicated node does not use a computed attribute for the specified attribute '{data.attribute}'"
+            )
+
+        recalculate_single_workflow = COMPUTED_ATTRIBUTE_PROCESS_JINJA2
+        recalculate_all_workflow = TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES
+        if attribute.computed_attribute.kind == ComputedAttributeKind.TRANSFORM_PYTHON:
+            if not await NodeManager.query(
+                db=graphql_context.db,
+                branch=graphql_context.branch,
+                schema=CoreTransformPython,
+                filters={"name__value": attribute.computed_attribute.transform},
+            ):
+                raise ProcessingError(
+                    message=f"The transform for the indicated node computed attribute for the specified attribute '{data.attribute}' does not exist"
+                )
+
+            recalculate_single_workflow = COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM
+            recalculate_all_workflow = TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES
+
+        if data.node_ids:
+            nodes = await NodeManager.get_many(
+                db=graphql_context.db, branch=graphql_context.branch, ids=list(data.node_ids)
+            )
+            for node in nodes.values():
+                await get_workflow().submit_workflow(
+                    workflow=recalculate_single_workflow,
+                    context=graphql_context.get_context(),
+                    parameters={
+                        "branch_name": graphql_context.branch.name,
+                        "computed_attribute_name": str(data.attribute),
+                        "computed_attribute_kind": node_schema.kind,
+                        "node_kind": node_schema.kind,
+                        "object_id": node.id,
+                        "context": context,
+                    },
+                )
+        else:
+            await get_workflow().submit_workflow(
+                workflow=recalculate_all_workflow,
+                context=graphql_context.get_context(),
+                parameters={
+                    "branch_name": graphql_context.branch.name,
+                    "computed_attribute_name": str(data.attribute),
+                    "computed_attribute_kind": node_schema.kind,
+                    "context": context,
+                },
            )
+
+        result: dict[str, Any] = {"ok": True}
+        return cls(**result)
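
Note: the hunk above adds a RecomputeComputedAttribute mutation that submits either a per-node or a kind-wide recompute workflow. A minimal usage sketch follows, assuming the mutation is registered in the GraphQL schema under the field name InfrahubComputedAttributeRecompute (the registration itself is not shown in this diff) and that the infrahub_sdk client is used to send the request:

from infrahub_sdk import InfrahubClient

# The top-level field name below is an assumption; the diff does not show how
# RecomputeComputedAttribute is wired into infrahub/graphql/schema.py.
RECOMPUTE_MUTATION = """
mutation ($kind: String!, $attribute: String!, $node_ids: [String!]) {
    InfrahubComputedAttributeRecompute(
        data: {kind: $kind, attribute: $attribute, node_ids: $node_ids}
    ) {
        ok
    }
}
"""


async def recompute(client: InfrahubClient, kind: str, attribute: str, node_ids: list[str] | None = None) -> dict:
    # Omitting node_ids exercises the "recalculate all" branch of the mutation.
    return await client.execute_graphql(
        query=RECOMPUTE_MUTATION,
        variables={"kind": kind, "attribute": attribute, "node_ids": node_ids},
    )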

infrahub/graphql/mutations/convert_object_type.py

@@ -5,9 +5,12 @@ from graphene.types.generic import GenericScalar
 from graphql import GraphQLResolveInfo
 
 from infrahub.core import registry
-from infrahub.core.convert_object_type.conversion import InputForDestField, convert_object_type
+from infrahub.core.constants.infrahubkind import READONLYREPOSITORY, REPOSITORY
+from infrahub.core.convert_object_type.object_conversion import ConversionFieldInput, convert_and_validate_object_type
+from infrahub.core.convert_object_type.repository_conversion import convert_repository_type
 from infrahub.core.convert_object_type.schema_mapping import get_schema_mapping
 from infrahub.core.manager import NodeManager
+from infrahub.repositories.create_repository import RepositoryFinalizer
 
 if TYPE_CHECKING:
     from infrahub.graphql.initialization import GraphqlContext
@@ -44,26 +47,44 @@ class ConvertObjectType(Mutation):
         source_schema = registry.get_node_schema(name=node_to_convert.get_kind(), branch=graphql_context.branch)
         target_schema = registry.get_node_schema(name=str(data.target_kind), branch=graphql_context.branch)
 
-        fields_mapping: dict[str, InputForDestField] = {}
+        fields_mapping: dict[str, ConversionFieldInput] = {}
         if not isinstance(data.fields_mapping, dict):
-            raise ValueError(f"Expected `fields_mapping` to be a `dict`, got {type(fields_mapping)}")
+            raise ValueError(f"Expected `fields_mapping` to be a `dict`, got {type(data.fields_mapping)}")
 
         for field_name, input_for_dest_field_str in data.fields_mapping.items():
-            fields_mapping[field_name] = InputForDestField(**input_for_dest_field_str)
+            fields_mapping[field_name] = ConversionFieldInput(**input_for_dest_field_str)
+
+        node_to_convert = await NodeManager.get_one(
+            id=str(data.node_id), db=graphql_context.db, branch=graphql_context.branch
+        )
 
         # Complete fields mapping with auto-mapping.
         mapping = get_schema_mapping(source_schema=source_schema, target_schema=target_schema)
         for field_name, mapping_value in mapping.items():
             if mapping_value.source_field_name is not None and field_name not in fields_mapping:
-                fields_mapping[field_name] = InputForDestField(source_field=mapping_value.source_field_name)
-
-        new_node = await convert_object_type(
-            node=node_to_convert,
-            target_schema=target_schema,
-            mapping=fields_mapping,
-            branch=graphql_context.branch,
-            db=graphql_context.db,
-        )
+                fields_mapping[field_name] = ConversionFieldInput(source_field=mapping_value.source_field_name)
+
+        if target_schema.kind in [REPOSITORY, READONLYREPOSITORY]:
+            new_node = await convert_repository_type(
+                repository=node_to_convert,
+                target_schema=target_schema,
+                mapping=fields_mapping,
+                branch=graphql_context.branch,
+                db=graphql_context.db,
+                repository_post_creator=RepositoryFinalizer(
+                    account_session=graphql_context.active_account_session,
+                    services=graphql_context.active_service,
+                    context=graphql_context.get_context(),
+                ),
+            )
+        else:
+            new_node = await convert_and_validate_object_type(
+                node=node_to_convert,
+                target_schema=target_schema,
+                mapping=fields_mapping,
+                branch=graphql_context.branch,
+                db=graphql_context.db,
+            )
 
         dict_node = await new_node.to_graphql(db=graphql_context.db, fields={})
         result: dict[str, Any] = {"ok": True, "node": dict_node}
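
Note: in the hunk above, explicitly supplied fields_mapping entries always win, and auto-mapping only fills fields that were not mapped by the caller. A small standalone sketch of that precedence rule, with ConversionFieldInput reduced to a plain dataclass for illustration (the real type lives in infrahub/core/convert_object_type/object_conversion.py):

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class ConversionFieldInput:
    # Simplified stand-in: only the source_field attribute used in the hunk is modeled.
    source_field: str | None = None


def merge_field_mappings(
    explicit: dict[str, ConversionFieldInput], auto_mapping: dict[str, str | None]
) -> dict[str, ConversionFieldInput]:
    merged = dict(explicit)
    for field_name, source_field_name in auto_mapping.items():
        # Auto-mapping never overrides an explicit entry, mirroring the loop above.
        if source_field_name is not None and field_name not in merged:
            merged[field_name] = ConversionFieldInput(source_field=source_field_name)
    return merged


# Example: "description" is auto-filled, "name" keeps the explicit mapping.
mapping = merge_field_mappings(
    explicit={"name": ConversionFieldInput(source_field="hostname")},
    auto_mapping={"name": "name", "description": "description"},
)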

infrahub/graphql/mutations/ipam.py

@@ -16,10 +16,12 @@ from infrahub.core.node import Node
 from infrahub.core.schema import NodeSchema
 from infrahub.database import InfrahubDatabase, retry_db_transaction
 from infrahub.exceptions import NodeNotFoundError, ValidationError
-from infrahub.lock import InfrahubMultiLock, build_object_lock_name
+from infrahub.lock import InfrahubMultiLock
 from infrahub.log import get_logger
 
-from .main import DeleteResult, InfrahubMutationMixin, InfrahubMutationOptions
+from ...core.node.create import create_node
+from ...core.node.lock_utils import build_object_lock_name
+from .main import DeleteResult, InfrahubMutationMixin, InfrahubMutationOptions, build_graphql_response
 from .node_getter.by_default_filter import MutationNodeGetterByDefaultFilter
 
 if TYPE_CHECKING:
@@ -121,7 +123,13 @@ class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation):
         ip_address: ipaddress.IPv4Interface | ipaddress.IPv6Interface,
         namespace_id: str,
     ) -> Node:
-        address = await cls.mutate_create_object(data=data, db=db, branch=branch)
+        address = await create_node(
+            data=dict(data),
+            db=db,
+            branch=branch,
+            schema=cls._meta.active_schema,
+        )
+
         reconciler = IpamReconciler(db=db, branch=branch)
         reconciled_address = await reconciler.reconcile(
             ip_value=ip_address, namespace=namespace_id, node_uuid=address.get_id()
@@ -152,9 +160,9 @@ class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation):
             reconciled_address = await cls._mutate_create_object_and_reconcile(
                 data=data, branch=branch, db=dbt, ip_address=ip_address, namespace_id=namespace_id
             )
-            result = await cls.mutate_create_to_graphql(info=info, db=dbt, obj=reconciled_address)
+            graphql_response = await build_graphql_response(info=info, db=dbt, obj=reconciled_address)
 
-        return reconciled_address, result
+        return reconciled_address, cls(**graphql_response)
 
     @classmethod
     async def _mutate_update_object_and_reconcile(
@@ -275,7 +283,12 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
         db: InfrahubDatabase,
         namespace_id: str,
     ) -> Node:
-        prefix = await cls.mutate_create_object(data=data, db=db, branch=branch)
+        prefix = await create_node(
+            data=dict(data),
+            db=db,
+            branch=branch,
+            schema=cls._meta.active_schema,
+        )
         return await cls._reconcile_prefix(
             branch=branch, db=db, prefix=prefix, namespace_id=namespace_id, is_delete=False
         )
@@ -300,9 +313,9 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
                 data=data, branch=branch, db=dbt, namespace_id=namespace_id
             )
 
-            result = await cls.mutate_create_to_graphql(info=info, db=dbt, obj=reconciled_prefix)
+            graphql_response = await build_graphql_response(info=info, db=dbt, obj=reconciled_prefix)
 
-        return reconciled_prefix, result
+        return reconciled_prefix, cls(**graphql_response)
 
     @classmethod
     async def _mutate_update_object_and_reconcile(

infrahub/graphql/mutations/main.py

@@ -1,22 +1,18 @@
 from __future__ import annotations
 
-import hashlib
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any
 
 from graphene import InputObjectType, Mutation
 from graphene.types.mutation import MutationOptions
+from infrahub_sdk.utils import extract_fields_first_node
 from typing_extensions import Self
 
 from infrahub import config, lock
-from infrahub.core.constants import InfrahubKind, MutationAction
+from infrahub.core.constants import MutationAction
 from infrahub.core.constraint.node.runner import NodeConstraintRunner
 from infrahub.core.manager import NodeManager
-from infrahub.core.node.create import (
-    create_node,
-    get_profile_ids,
-    refresh_for_profile_update,
-)
+from infrahub.core.node.create import create_node, get_profile_ids
 from infrahub.core.schema import MainSchemaTypes, NodeSchema
 from infrahub.core.schema.generic_schema import GenericSchema
 from infrahub.core.schema.profile_schema import ProfileSchema
@@ -28,9 +24,11 @@ from infrahub.events.generator import generate_node_mutation_events
 from infrahub.exceptions import HFIDViolatedError, InitializationError, NodeNotFoundError
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.field_extractor import extract_graphql_fields
-from infrahub.lock import InfrahubMultiLock, build_object_lock_name
+from infrahub.lock import InfrahubMultiLock
 from infrahub.log import get_log_data, get_logger
+from infrahub.profiles.node_applier import NodeProfilesApplier
 
+from ...core.node.lock_utils import get_kind_lock_names_on_object_mutation
 from .node_getter.by_default_filter import MutationNodeGetterByDefaultFilter
 
 if TYPE_CHECKING:
@@ -38,7 +36,6 @@ if TYPE_CHECKING:
 
     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
-    from infrahub.core.schema.schema_branch import SchemaBranch
     from infrahub.database import InfrahubDatabase
     from infrahub.graphql.types.context import ContextInput
 
@@ -47,8 +44,6 @@ if TYPE_CHECKING:
 
 log = get_logger()
 
-KINDS_CONCURRENT_MUTATIONS_NOT_ALLOWED = [InfrahubKind.GENERICGROUP]
-
 
 @dataclass
 class DeleteResult:
@@ -146,23 +141,6 @@ class InfrahubMutationMixin:
 
         return mutation
 
-    @classmethod
-    async def _call_mutate_create_object(
-        cls, data: InputObjectType, db: InfrahubDatabase, branch: Branch, override_data: dict[str, Any] | None = None
-    ) -> Node:
-        """
-        Wrapper around mutate_create_object to potentially activate locking.
-        """
-        schema_branch = db.schema.get_schema_branch(name=branch.name)
-        lock_names = _get_kind_lock_names_on_object_mutation(
-            kind=cls._meta.active_schema.kind, branch=branch, schema_branch=schema_branch, data=data
-        )
-        if lock_names:
-            async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
-                return await cls.mutate_create_object(data=data, db=db, branch=branch, override_data=override_data)
-
-        return await cls.mutate_create_object(data=data, db=db, branch=branch, override_data=override_data)
-
     @classmethod
     async def mutate_create(
         cls,
@@ -172,40 +150,21 @@ class InfrahubMutationMixin:
         database: InfrahubDatabase | None = None,
         override_data: dict[str, Any] | None = None,
    ) -> tuple[Node, Self]:
-        graphql_context: GraphqlContext = info.context
-        db = database or graphql_context.db
-        obj = await cls._call_mutate_create_object(data=data, db=db, branch=branch, override_data=override_data)
-        result = await cls.mutate_create_to_graphql(info=info, db=db, obj=obj)
-        return obj, result
-
-    @classmethod
-    @retry_db_transaction(name="object_create")
-    async def mutate_create_object(
-        cls,
-        data: InputObjectType,
-        db: InfrahubDatabase,
-        branch: Branch,
-        override_data: dict[str, Any] | None = None,
-    ) -> Node:
+        db = database or info.context.db
         schema = cls._meta.active_schema
-        if isinstance(schema, GenericSchema):
-            raise ValueError(f"Node of generic schema `{schema.name=}` can not be instantiated.")
+
         create_data = dict(data)
         create_data.update(override_data or {})
-        return await create_node(
+
+        obj = await create_node(
             data=create_data,
             db=db,
             branch=branch,
             schema=schema,
         )
 
-    @classmethod
-    async def mutate_create_to_graphql(cls, info: GraphQLResolveInfo, db: InfrahubDatabase, obj: Node) -> Self:
-        fields = extract_graphql_fields(info=info)
-        result: dict[str, Any] = {"ok": True}
-        if "object" in fields:
-            result["object"] = await obj.to_graphql(db=db, fields=fields.get("object", {}))
-        return cls(**result)
+        graphql_response = await build_graphql_response(info=info, db=db, obj=obj)
+        return obj, cls(**graphql_response)
 
     @classmethod
     async def _call_mutate_update(
@@ -222,8 +181,8 @@ class InfrahubMutationMixin:
         """
 
         schema_branch = db.schema.get_schema_branch(name=branch.name)
-        lock_names = _get_kind_lock_names_on_object_mutation(
-            kind=cls._meta.active_schema.kind, branch=branch, schema_branch=schema_branch, data=data
+        lock_names = get_kind_lock_names_on_object_mutation(
+            kind=cls._meta.active_schema.kind, branch=branch, schema_branch=schema_branch, data=dict(data)
        )
 
         if db.is_transaction:
@@ -290,7 +249,6 @@ class InfrahubMutationMixin:
         component_registry = get_component_registry()
         node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch)
 
-        before_mutate_profile_ids = await get_profile_ids(db=db, obj=obj)
         await obj.from_graphql(db=db, data=data)
         fields_to_validate = list(data)
         await node_constraint_runner.check(
@@ -302,15 +260,13 @@ class InfrahubMutationMixin:
             if field_to_remove in fields:
                 fields.remove(field_to_remove)
 
+        after_mutate_profile_ids = await get_profile_ids(db=db, obj=obj)
+        if after_mutate_profile_ids or (not after_mutate_profile_ids and obj.uses_profiles()):
+            node_profiles_applier = NodeProfilesApplier(db=db, branch=branch)
+            updated_field_names = await node_profiles_applier.apply_profiles(node=obj)
+            fields += updated_field_names
         await obj.save(db=db, fields=fields)
 
-        obj = await refresh_for_profile_update(
-            db=db,
-            branch=branch,
-            obj=obj,
-            previous_profile_ids=before_mutate_profile_ids,
-            schema=cls._meta.active_schema,
-        )
         return obj
 
     @classmethod
@@ -422,6 +378,15 @@ class InfrahubMutationMixin:
            )
         return updated_obj, mutation, False
 
+    @classmethod
+    async def _delete_obj(cls, graphql_context: GraphqlContext, branch: Branch, obj: Node) -> list[Node]:
+        db = graphql_context.db
+        async with db.start_transaction() as dbt:
+            deleted = await NodeManager.delete(db=dbt, branch=branch, nodes=[obj])
+        deleted_str = ", ".join([f"{d.get_kind()}({d.get_id()})" for d in deleted])
+        log.info(f"nodes deleted: {deleted_str}")
+        return deleted
+
     @classmethod
     @retry_db_transaction(name="object_delete")
     async def mutate_delete(
@@ -440,11 +405,7 @@ class InfrahubMutationMixin:
            branch=branch,
        )
 
-        async with graphql_context.db.start_transaction() as db:
-            deleted = await NodeManager.delete(db=db, branch=branch, nodes=[obj])
-
-        deleted_str = ", ".join([f"{d.get_kind()}({d.get_id()})" for d in deleted])
-        log.info(f"nodes deleted: {deleted_str}")
+        deleted = await cls._delete_obj(graphql_context=graphql_context, branch=branch, obj=obj)
 
         ok = True
 
@@ -471,90 +432,13 @@ class InfrahubMutation(InfrahubMutationMixin, Mutation):
         super().__init_subclass_with_meta__(_meta=_meta, **options)
 
 
-def _get_kinds_to_lock_on_object_mutation(kind: str, schema_branch: SchemaBranch) -> list[str]:
-    """
-    Return kinds for which we want to lock during creating / updating an object of a given schema node.
-    Lock should be performed on schema kind and its generics having a uniqueness_constraint defined.
-    If a generic uniqueness constraint is the same as the node schema one,
-    it means node schema overrided this constraint, in which case we only need to lock on the generic.
-    """
-
-    node_schema = schema_branch.get(name=kind)
-
-    schema_uc = None
-    kinds = []
-    if node_schema.uniqueness_constraints:
-        kinds.append(node_schema.kind)
-        schema_uc = node_schema.uniqueness_constraints
-
-    if node_schema.is_generic_schema:
-        return kinds
-
-    generics_kinds = node_schema.inherit_from
-
-    node_schema_kind_removed = False
-    for generic_kind in generics_kinds:
-        generic_uc = schema_branch.get(name=generic_kind).uniqueness_constraints
-        if generic_uc:
-            kinds.append(generic_kind)
-            if not node_schema_kind_removed and generic_uc == schema_uc:
-                # Check whether we should remove original schema kind as it simply overrides uniqueness_constraint
-                # of a generic
-                kinds.pop(0)
-                node_schema_kind_removed = True
-    return kinds
-
-
-def _should_kind_be_locked_on_any_branch(kind: str, schema_branch: SchemaBranch) -> bool:
-    """
-    Check whether kind or any kind generic is in KINDS_TO_LOCK_ON_ANY_BRANCH.
-    """
-
-    if kind in KINDS_CONCURRENT_MUTATIONS_NOT_ALLOWED:
-        return True
-
-    node_schema = schema_branch.get(name=kind)
-    if node_schema.is_generic_schema:
-        return False
-
-    for generic_kind in node_schema.inherit_from:
-        if generic_kind in KINDS_CONCURRENT_MUTATIONS_NOT_ALLOWED:
-            return True
-    return False
-
-
-def _hash(value: str) -> str:
-    # Do not use builtin `hash` for lock names as due to randomization results would differ between
-    # different processes.
-    return hashlib.sha256(value.encode()).hexdigest()
-
-
-def _get_kind_lock_names_on_object_mutation(
-    kind: str, branch: Branch, schema_branch: SchemaBranch, data: InputObjectType
-) -> list[str]:
-    """
-    Return objects kind for which we want to avoid concurrent mutation (create/update). Except for some specific kinds,
-    concurrent mutations are only allowed on non-main branch as objects validations will be performed at least when merging in main branch.
-    """
-
-    if not branch.is_default and not _should_kind_be_locked_on_any_branch(kind=kind, schema_branch=schema_branch):
-        return []
-
-    if kind == InfrahubKind.GRAPHQLQUERYGROUP:
-        # Lock on name as well to improve performances
-        try:
-            name = data.name.value
-            return [build_object_lock_name(kind + "." + _hash(name))]
-        except AttributeError:
-            # We might reach here if we are updating a CoreGraphQLQueryGroup without updating the name,
-            # in which case we would not need to lock. This is not supposed to happen as current `update`
-            # logic first fetches the node with its name.
-            return []
-
-    lock_kinds = _get_kinds_to_lock_on_object_mutation(kind, schema_branch)
-    lock_names = [build_object_lock_name(kind) for kind in lock_kinds]
-    return lock_names
-
-
 def _get_data_fields(data: InputObjectType) -> list[str]:
     return [field for field in data.keys() if field not in ["id", "hfid"]]
+
+
+async def build_graphql_response(info: GraphQLResolveInfo, db: InfrahubDatabase, obj: Node) -> dict:
    fields = await extract_fields_first_node(info)
+    result: dict[str, Any] = {"ok": True}
+    if "object" in fields:
+        result["object"] = await obj.to_graphql(db=db, fields=fields.get("object", {}))
+    return result
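
Note: the lock helpers deleted in the last hunk are relocated to infrahub/core/node/lock_utils.py (+98 lines in the file list above) rather than dropped. For reference, the deterministic lock naming they rely on can be sketched as follows; build_object_lock_name is a hypothetical stand-in here, since the real helper's exact lock-name format is defined in lock_utils.py and not shown in this diff:

import hashlib


def _hash(value: str) -> str:
    # sha256 instead of the builtin hash(): hash() is salted per process, so two API
    # workers would otherwise derive different lock names for the same object name.
    return hashlib.sha256(value.encode()).hexdigest()


def build_object_lock_name(name: str) -> str:
    # Hypothetical stand-in for the helper now exported by infrahub/core/node/lock_utils.py.
    return f"object.{name}"


# Per-name lock for a CoreGraphQLQueryGroup mutation, as in the removed branch above.
lock_name = build_object_lock_name("CoreGraphQLQueryGroup" + "." + _hash("my-query-group"))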