infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/oauth2.py +13 -19
- infrahub/api/oidc.py +15 -21
- infrahub/api/schema.py +24 -3
- infrahub/artifacts/models.py +2 -1
- infrahub/auth.py +137 -3
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +103 -98
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +30 -3
- infrahub/computed_attribute/tasks.py +20 -8
- infrahub/core/attribute.py +13 -5
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +7 -3
- infrahub/core/branch/tasks.py +70 -8
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +3 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +5 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +12 -11
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
- infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
- infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +35 -4
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +52 -19
- infrahub/core/node/__init__.py +158 -51
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +46 -63
- infrahub/core/node/lock_utils.py +70 -44
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/attribute.py +55 -0
- infrahub/core/query/ipam.py +1 -0
- infrahub/core/query/node.py +23 -4
- infrahub/core/query/relationship.py +1 -0
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -0
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +16 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +22 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +300 -8
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +34 -22
- infrahub/git/base.py +4 -1
- infrahub/git/integrator.py +23 -15
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +22 -5
- infrahub/git/tasks.py +66 -10
- infrahub/git/utils.py +123 -1
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/endpoints.py +14 -4
- infrahub/graphql/manager.py +4 -9
- infrahub/graphql/mutations/convert_object_type.py +11 -1
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +54 -35
- infrahub/graphql/mutations/main.py +27 -28
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -16
- infrahub/message_bus/types.py +2 -1
- infrahub/middleware.py +26 -1
- infrahub/permissions/constants.py +2 -0
- infrahub/proposed_change/tasks.py +35 -17
- infrahub/server.py +21 -4
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/workers/dependencies.py +10 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +80 -0
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +13 -10
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +28 -9
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +53 -44
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
- infrahub_testcontainers/container.py +115 -3
- infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
- infrahub_testcontainers/docker-compose.test.yml +6 -1
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/graphql/manager.py
CHANGED

@@ -780,10 +780,7 @@ class GraphQLSchemaManager:
             attr_kind = get_attr_kind(schema, attr)
             attr_type = get_attribute_type(kind=attr_kind).get_graphql_update()
 
-
-            required = not attr.optional if not attr.default_value else False
-
-            attrs[attr.name] = graphene.InputField(attr_type, required=required, description=attr.description)
+            attrs[attr.name] = graphene.InputField(attr_type, description=attr.description)
 
         for rel in schema.relationships:
             if rel.internal_peer or rel.read_only:
@@ -791,14 +788,12 @@ class GraphQLSchemaManager:
 
             input_type = self._get_related_input_type(relationship=rel)
 
-            required = not rel.optional
             if rel.cardinality == RelationshipCardinality.ONE:
-                attrs[rel.name] = graphene.InputField(input_type,
+                attrs[rel.name] = graphene.InputField(input_type, description=rel.description)
 
             elif rel.cardinality == RelationshipCardinality.MANY:
-                attrs[rel.name] = graphene.InputField(
-
-                )
+                attrs[rel.name] = graphene.InputField(graphene.List(input_type), description=rel.description)
+
         input_name = f"{schema.kind}UpsertInput"
         md5hash = hashlib.md5(usedforsecurity=False)
         md5hash.update(f"{input_name}{schema.get_hash()}".encode())
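The net effect of this change is that the generated "{Kind}UpsertInput" types no longer mark any attribute or relationship as required, so an upsert payload only needs to carry the fields it actually sets. A minimal sketch of what such an input type looks like once nothing is required; the kind and field names below are hypothetical and not Infrahub's generated schema:

import graphene

class DeviceUpsertInput(graphene.InputObjectType):
    # Before this change, non-optional attributes carried required=True on their
    # InputField; now every field can be omitted, so a partial upsert is valid at
    # the GraphQL layer and server-side logic decides what is actually mandatory.
    name = graphene.InputField(graphene.String, description="Device name")
    description = graphene.InputField(graphene.String, description="Free-form description")
    tags = graphene.InputField(graphene.List(graphene.String), description="Related tags")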
infrahub/graphql/mutations/convert_object_type.py
CHANGED

@@ -10,9 +10,11 @@ from infrahub.core.convert_object_type.object_conversion import ConversionFieldInput
 from infrahub.core.convert_object_type.repository_conversion import convert_repository_type
 from infrahub.core.convert_object_type.schema_mapping import get_schema_mapping
 from infrahub.core.manager import NodeManager
+from infrahub.exceptions import ValidationError
 from infrahub.repositories.create_repository import RepositoryFinalizer
 
 if TYPE_CHECKING:
+    from infrahub.core.attribute import BaseAttribute
     from infrahub.graphql.initialization import GraphqlContext
 
 
@@ -49,7 +51,9 @@ class ConvertObjectType(Mutation):
 
         fields_mapping: dict[str, ConversionFieldInput] = {}
         if not isinstance(data.fields_mapping, dict):
-            raise
+            raise ValidationError(
+                input_value=f"Expected `fields_mapping` to be a `dict`, got {type(data.fields_mapping)}"
+            )
 
         for field_name, input_for_dest_field_str in data.fields_mapping.items():
             fields_mapping[field_name] = ConversionFieldInput(**input_for_dest_field_str)
@@ -57,6 +61,12 @@ class ConvertObjectType(Mutation):
         node_to_convert = await NodeManager.get_one(
             id=str(data.node_id), db=graphql_context.db, branch=graphql_context.branch
         )
+        for attribute_name in source_schema.attribute_names:
+            attribute: BaseAttribute = getattr(node_to_convert, attribute_name)
+            if attribute.is_from_profile:
+                raise ValidationError(
+                    input_value=f"The attribute '{attribute_name}' is from a profile, converting objects that use profiles is not yet supported."
+                )
 
         # Complete fields mapping with auto-mapping.
         mapping = get_schema_mapping(source_schema=source_schema, target_schema=target_schema)
infrahub/graphql/mutations/display_label.py
ADDED

@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from graphene import Boolean, InputObjectType, Mutation, String
+
+from infrahub.core.account import ObjectPermission
+from infrahub.core.constants import GlobalPermissions, PermissionAction, PermissionDecision
+from infrahub.core.manager import NodeManager
+from infrahub.core.registry import registry
+from infrahub.database import retry_db_transaction
+from infrahub.events import EventMeta
+from infrahub.events.node_action import NodeUpdatedEvent
+from infrahub.exceptions import NodeNotFoundError, ValidationError
+from infrahub.graphql.context import apply_external_context
+from infrahub.graphql.types.context import ContextInput
+from infrahub.log import get_log_data
+from infrahub.permissions import define_global_permission_from_branch
+from infrahub.worker import WORKER_IDENTITY
+
+if TYPE_CHECKING:
+    from graphql import GraphQLResolveInfo
+
+    from infrahub.graphql.initialization import GraphqlContext
+
+
+class InfrahubDisplayLabelUpdateInput(InputObjectType):
+    id = String(required=True)
+    kind = String(required=True)
+    value = String(required=True)
+
+
+class UpdateDisplayLabel(Mutation):
+    class Arguments:
+        data = InfrahubDisplayLabelUpdateInput(required=True)
+        context = ContextInput(required=False)
+
+    ok = Boolean()
+
+    @classmethod
+    @retry_db_transaction(name="update_display_label")
+    async def mutate(
+        cls,
+        _: dict,
+        info: GraphQLResolveInfo,
+        data: InfrahubDisplayLabelUpdateInput,
+        context: ContextInput | None = None,
+    ) -> UpdateDisplayLabel:
+        graphql_context: GraphqlContext = info.context
+        node_schema = registry.schema.get_node_schema(
+            name=str(data.kind), branch=graphql_context.branch.name, duplicate=False
+        )
+        if not node_schema.display_label:
+            raise ValidationError(input_value=f"{node_schema.kind}.display_label has not been defined for this kind.")
+
+        graphql_context.active_permissions.raise_for_permissions(
+            permissions=[
+                define_global_permission_from_branch(
+                    permission=GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL,
+                    branch_name=graphql_context.branch.name,
+                ),
+                ObjectPermission(
+                    namespace=node_schema.namespace,
+                    name=node_schema.name,
+                    action=PermissionAction.UPDATE.value,
+                    decision=PermissionDecision.ALLOW_DEFAULT.value
+                    if graphql_context.branch.name == registry.default_branch
+                    else PermissionDecision.ALLOW_OTHER.value,
+                ),
+            ]
+        )
+        await apply_external_context(graphql_context=graphql_context, context_input=context)
+
+        if not (
+            target_node := await NodeManager.get_one(
+                db=graphql_context.db,
+                kind=node_schema.kind,
+                id=str(data.id),
+                branch=graphql_context.branch,
+                fields={"display_label": None},
+            )
+        ):
+            raise NodeNotFoundError(
+                node_type=node_schema.kind,
+                identifier=str(data.id),
+                message="The targeted node was not found in the database",
+            )
+
+        existing_label = (
+            await target_node.get_display_label(db=graphql_context.db) if target_node.has_display_label() else None
+        )
+        if str(data.value) != existing_label:
+            await target_node.set_display_label(value=str(data.value))
+
+            async with graphql_context.db.start_transaction() as dbt:
+                await target_node.save(db=dbt, fields=["display_label"])
+
+            log_data = get_log_data()
+            request_id = log_data.get("request_id", "")
+
+            event = NodeUpdatedEvent(
+                kind=node_schema.kind,
+                node_id=target_node.get_id(),
+                changelog=target_node.node_changelog.model_dump(),
+                fields=["display_label"],
+                meta=EventMeta(
+                    context=graphql_context.get_context(),
+                    initiator_id=WORKER_IDENTITY,
+                    request_id=request_id,
+                    account_id=graphql_context.active_account_session.account_id,
+                    branch=graphql_context.branch,
+                ),
+            )
+            await graphql_context.active_service.event.send(event=event)
+
+        result: dict[str, Any] = {"ok": True}
+
+        return cls(**result)
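For context, a hypothetical way to call this new mutation through the Python SDK, assuming the SDK's execute_graphql helper; the exposed GraphQL field name ("InfrahubUpdateDisplayLabel" below) and the node id/kind are illustrative assumptions, not taken from the released schema:

from infrahub_sdk import InfrahubClient

# The mutation field name below is an assumption; check the generated schema for the real name.
UPDATE_DISPLAY_LABEL = """
mutation {
  InfrahubUpdateDisplayLabel(data: {id: "c6e0fd4b-0000-0000-0000-000000000000", kind: "InfraDevice", value: "edge-01"}) {
    ok
  }
}
"""

async def update_display_label() -> None:
    client = InfrahubClient(address="http://localhost:8000")
    result = await client.execute_graphql(query=UPDATE_DISPLAY_LABEL)
    print(result)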
infrahub/graphql/mutations/generator.py
CHANGED

@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING
 from graphene import Boolean, Field, InputField, InputObjectType, List, Mutation, NonNull, String
 
 from infrahub.core.manager import NodeManager
+from infrahub.core.protocols import CoreGeneratorDefinition, CoreGenericRepository, CoreGraphQLQuery, CoreStandardGroup
 from infrahub.generators.models import ProposedChangeGeneratorDefinition, RequestGeneratorDefinitionRun
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.types.context import ContextInput
@@ -44,11 +45,18 @@ class GeneratorDefinitionRequestRun(Mutation):
         db = graphql_context.db
         await apply_external_context(graphql_context=graphql_context, context_input=context)
         generator_definition = await NodeManager.get_one(
-            id=str(data.id),
+            id=str(data.id),
+            kind=CoreGeneratorDefinition,
+            db=db,
+            branch=graphql_context.branch,
+            prefetch_relationships=True,
+            raise_on_error=True,
         )
-        query = await generator_definition.query.get_peer(db=db)
-        repository = await generator_definition.repository.get_peer(
-
+        query = await generator_definition.query.get_peer(db=db, peer_type=CoreGraphQLQuery, raise_on_error=True)
+        repository = await generator_definition.repository.get_peer(
+            db=db, peer_type=CoreGenericRepository, raise_on_error=True
+        )
+        group = await generator_definition.targets.get_peer(db=db, peer_type=CoreStandardGroup, raise_on_error=True)
 
         request_model = RequestGeneratorDefinitionRun(
             generator_definition=ProposedChangeGeneratorDefinition(
@@ -57,11 +65,21 @@ class GeneratorDefinitionRequestRun(Mutation):
                 class_name=generator_definition.class_name.value,
                 file_path=generator_definition.file_path.value,
                 query_name=query.name.value,
-                query_models=query.models.value,
+                query_models=query.models.value or [],
                 repository_id=repository.id,
-                parameters=generator_definition.parameters.value
+                parameters=generator_definition.parameters.value
+                if isinstance(generator_definition.parameters.value, dict)
+                else {},
                 group_id=group.id,
-                convert_query_response=generator_definition.convert_query_response.value
+                convert_query_response=generator_definition.convert_query_response.value
+                if generator_definition.convert_query_response.value is not None
+                else False,
+                execute_in_proposed_change=generator_definition.execute_in_proposed_change.value
+                if generator_definition.execute_in_proposed_change.value is not None
+                else True,
+                execute_after_merge=generator_definition.execute_after_merge.value
+                if generator_definition.execute_after_merge.value is not None
+                else True,
             ),
             branch=graphql_context.branch.name,
             target_members=data.get("nodes", []),
infrahub/graphql/mutations/hfid.py
ADDED

@@ -0,0 +1,125 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, cast
+
+from graphene import Boolean, InputObjectType, List, Mutation, NonNull, String
+
+from infrahub.core.account import ObjectPermission
+from infrahub.core.constants import GlobalPermissions, PermissionAction, PermissionDecision
+from infrahub.core.manager import NodeManager
+from infrahub.core.registry import registry
+from infrahub.database import retry_db_transaction
+from infrahub.events import EventMeta
+from infrahub.events.node_action import NodeUpdatedEvent
+from infrahub.exceptions import NodeNotFoundError, ValidationError
+from infrahub.graphql.context import apply_external_context
+from infrahub.graphql.types.context import ContextInput
+from infrahub.log import get_log_data
+from infrahub.permissions import define_global_permission_from_branch
+from infrahub.worker import WORKER_IDENTITY
+
+if TYPE_CHECKING:
+    from graphql import GraphQLResolveInfo
+
+    from infrahub.graphql.initialization import GraphqlContext
+
+
+class InfrahubHFIDUpdateInput(InputObjectType):
+    id = String(required=True)
+    kind = String(required=True)
+    value = List(NonNull(String), required=True)
+
+
+class UpdateHFID(Mutation):
+    class Arguments:
+        data = InfrahubHFIDUpdateInput(required=True)
+        context = ContextInput(required=False)
+
+    ok = Boolean()
+
+    @classmethod
+    @retry_db_transaction(name="update_hfid")
+    async def mutate(
+        cls,
+        _: dict,
+        info: GraphQLResolveInfo,
+        data: InfrahubHFIDUpdateInput,
+        context: ContextInput | None = None,
+    ) -> UpdateHFID:
+        graphql_context: GraphqlContext = info.context
+        node_schema = registry.schema.get_node_schema(
+            name=str(data.kind), branch=graphql_context.branch.name, duplicate=False
+        )
+        if node_schema.human_friendly_id is None:
+            raise ValidationError(
+                input_value=f"{node_schema.kind}.human_friendly_id has not been defined for this kind."
+            )
+
+        updated_hfid = cast(list[str], data.value)
+
+        if len(node_schema.human_friendly_id) != len(updated_hfid):
+            raise ValidationError(
+                input_value=f"{node_schema.kind}.human_friendly_id requires {len(node_schema.human_friendly_id)} parts data has {len(updated_hfid)}"
+            )
+
+        graphql_context.active_permissions.raise_for_permissions(
+            permissions=[
+                define_global_permission_from_branch(
+                    permission=GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL,
+                    branch_name=graphql_context.branch.name,
+                ),
+                ObjectPermission(
+                    namespace=node_schema.namespace,
+                    name=node_schema.name,
+                    action=PermissionAction.UPDATE.value,
+                    decision=PermissionDecision.ALLOW_DEFAULT.value
+                    if graphql_context.branch.name == registry.default_branch
+                    else PermissionDecision.ALLOW_OTHER.value,
+                ),
+            ]
+        )
+        await apply_external_context(graphql_context=graphql_context, context_input=context)
+
+        if not (
+            target_node := await NodeManager.get_one(
+                db=graphql_context.db,
+                kind=node_schema.kind,
+                id=str(data.id),
+                branch=graphql_context.branch,
+                fields={"human_friendly_id": None},
+            )
+        ):
+            raise NodeNotFoundError(
+                node_type=node_schema.kind,
+                identifier=str(data.id),
+                message="The targeted node was not found in the database",
+            )
+
+        existing = await target_node.get_hfid(db=graphql_context.db) if target_node.has_human_friendly_id() else None
+        if updated_hfid != existing:
+            await target_node.set_human_friendly_id(value=updated_hfid)
+
+            async with graphql_context.db.start_transaction() as dbt:
+                await target_node.save(db=dbt, fields=["human_friendly_id"])
+
+            log_data = get_log_data()
+            request_id = log_data.get("request_id", "")
+
+            event = NodeUpdatedEvent(
+                kind=node_schema.kind,
+                node_id=target_node.get_id(),
+                changelog=target_node.node_changelog.model_dump(),
+                fields=["human_friendly_id"],
+                meta=EventMeta(
+                    context=graphql_context.get_context(),
+                    initiator_id=WORKER_IDENTITY,
+                    request_id=request_id,
+                    account_id=graphql_context.active_account_session.account_id,
+                    branch=graphql_context.branch,
+                ),
+            )
+            await graphql_context.active_service.event.send(event=event)
+
+        result: dict[str, Any] = {"ok": True}
+
+        return cls(**result)
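As with the display label mutation, a hypothetical GraphQL call for the new HFID mutation; the field name "InfrahubUpdateHFID" and the identifiers are assumptions. The value list must carry exactly as many parts as the kind's human_friendly_id definition, otherwise the mutation raises a ValidationError. The document can be sent through the same execute_graphql helper shown earlier:

# Field name and identifiers below are illustrative assumptions, not the released schema.
UPDATE_HFID = """
mutation {
  InfrahubUpdateHFID(data: {id: "c6e0fd4b-0000-0000-0000-000000000000", kind: "InfraDevice", value: ["dc1", "edge-01"]}) {
    ok
  }
}
"""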
infrahub/graphql/mutations/ipam.py
CHANGED

@@ -20,7 +20,7 @@ from infrahub.lock import InfrahubMultiLock
 from infrahub.log import get_logger
 
 from ...core.node.create import create_node
-from ...core.node.lock_utils import build_object_lock_name
+from ...core.node.lock_utils import build_object_lock_name, get_lock_names_on_object_mutation
 from .main import DeleteResult, InfrahubMutationMixin, InfrahubMutationOptions, build_graphql_response
 from .node_getter.by_default_filter import MutationNodeGetterByDefaultFilter
 
@@ -108,11 +108,11 @@ class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation):
         super().__init_subclass_with_meta__(_meta=_meta, **options)
 
     @staticmethod
-    def
+    def _get_lock_names(namespace_id: str, branch: Branch) -> list[str]:
         if not branch.is_default:
             # Do not lock on other branches as reconciliation will be performed at least when merging in main branch.
-            return
-        return build_object_lock_name(InfrahubKind.IPADDRESS + "_" + namespace_id)
+            return []
+        return [build_object_lock_name(InfrahubKind.IPADDRESS + "_" + namespace_id)]
 
     @classmethod
     async def _mutate_create_object_and_reconcile(
@@ -150,17 +150,13 @@ class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation):
         ip_address = ipaddress.ip_interface(data["address"]["value"])
         namespace_id = await validate_namespace(db=db, branch=branch, data=data)
 
-
-
-
-            reconciled_address = await cls._mutate_create_object_and_reconcile(
-                data=data, branch=branch, db=dbt, ip_address=ip_address, namespace_id=namespace_id
-            )
-        else:
+        lock_names = cls._get_lock_names(namespace_id, branch)
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
+            async with db.start_transaction() as dbt:
                 reconciled_address = await cls._mutate_create_object_and_reconcile(
                     data=data, branch=branch, db=dbt, ip_address=ip_address, namespace_id=namespace_id
                 )
-
+                graphql_response = await build_graphql_response(info=info, db=dbt, obj=reconciled_address)
 
         return reconciled_address, cls(**graphql_response)
 
@@ -206,18 +202,28 @@ class InfrahubIPAddressMutation(InfrahubMutationMixin, Mutation):
         namespace = await address.ip_namespace.get_peer(db)
         namespace_id = await validate_namespace(db=db, branch=branch, data=data, existing_namespace_id=namespace.id)
 
-
-
-
+        # Prepare a clone to compute locks without triggering pool allocations
+        preview_obj = await NodeManager.get_one_by_id_or_default_filter(
+            db=db,
+            kind=address.get_kind(),
+            id=address.get_id(),
+            branch=branch,
+        )
+        await preview_obj.from_graphql(db=db, data=data, process_pools=False)
+
+        schema_branch = db.schema.get_schema_branch(name=branch.name)
+        lock_names = get_lock_names_on_object_mutation(node=preview_obj, schema_branch=schema_branch)
+
+        namespace_lock_names = cls._get_lock_names(namespace_id, branch)
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=namespace_lock_names):
+            # FIXME: do not lock when data does not contain uniqueness constraint fields or resource pool allocations
+            async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names, metrics=False):
+                async with db.start_transaction() as dbt:
                     reconciled_address = await cls._mutate_update_object_and_reconcile(
                         info=info, data=data, branch=branch, address=address, namespace_id=namespace_id, db=dbt
                     )
-        else:
-            reconciled_address = await cls._mutate_update_object_and_reconcile(
-                info=info, data=data, branch=branch, address=address, namespace_id=namespace_id, db=dbt
-            )
 
-
+                    result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=reconciled_address)
 
         return address, result
 
@@ -269,11 +275,11 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
         super().__init_subclass_with_meta__(_meta=_meta, **options)
 
     @staticmethod
-    def
+    def _get_lock_names(namespace_id: str) -> list[str]:
        # IPPrefix has some cardinality-one relationships involved (parent/child/ip_address),
        # so we need to lock on any branch to avoid creating multiple peers for these relationships
        # during concurrent ipam reconciliations.
-        return build_object_lock_name(InfrahubKind.IPPREFIX + "_" + namespace_id)
+        return [build_object_lock_name(InfrahubKind.IPPREFIX + "_" + namespace_id)]
 
     @classmethod
     async def _mutate_create_object_and_reconcile(
@@ -306,9 +312,9 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
         db = database or graphql_context.db
         namespace_id = await validate_namespace(db=db, branch=branch, data=data)
 
-
-
-        async with
+        lock_names = cls._get_lock_names(namespace_id)
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
+            async with db.start_transaction() as dbt:
                 reconciled_prefix = await cls._mutate_create_object_and_reconcile(
                     data=data, branch=branch, db=dbt, namespace_id=namespace_id
                 )
@@ -356,13 +362,26 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
         namespace = await prefix.ip_namespace.get_peer(db)
         namespace_id = await validate_namespace(db=db, branch=branch, data=data, existing_namespace_id=namespace.id)
 
-
-
-
-
-
-
-
+        # Prepare a clone to compute locks without triggering pool allocations
+        preview_obj = await NodeManager.get_one_by_id_or_default_filter(
+            db=db,
+            kind=prefix.get_kind(),
+            id=prefix.get_id(),
+            branch=branch,
+        )
+        await preview_obj.from_graphql(db=db, data=data, process_pools=False)
+
+        schema_branch = db.schema.get_schema_branch(name=branch.name)
+        lock_names = get_lock_names_on_object_mutation(node=preview_obj, schema_branch=schema_branch)
+
+        namespace_lock_names = cls._get_lock_names(namespace_id)
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=namespace_lock_names):
+            async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names, metrics=False):
+                async with db.start_transaction() as dbt:
+                    reconciled_prefix = await cls._mutate_update_object_and_reconcile(
+                        info=info, data=data, prefix=prefix, db=dbt, namespace_id=namespace_id, branch=branch
+                    )
+                    result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=reconciled_prefix)
 
         return prefix, result
 
@@ -421,9 +440,9 @@ class InfrahubIPPrefixMutation(InfrahubMutationMixin, Mutation):
         namespace_rels = await prefix.ip_namespace.get_relationships(db=db)
         namespace_id = namespace_rels[0].peer_id
 
-
-
-        async with
+        lock_names = cls._get_lock_names(namespace_id)
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
+            async with graphql_context.db.start_transaction() as dbt:
                 reconciled_prefix = await cls._reconcile_prefix(
                     branch=branch, db=dbt, prefix=prefix, namespace_id=namespace_id, is_delete=True
                 )
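The same locking shape now appears in several mutations: a preview of the node is loaded without triggering resource-pool allocation, fine-grained lock names are derived from it, a coarse namespace lock is taken first, the per-object locks second, and only then does the database transaction start. A minimal sketch of that ordering, assuming the infrahub.lock module is the source of the registry used above (everything else is taken from the diff):

from infrahub import lock  # assumed import path for the module-level lock registry
from infrahub.lock import InfrahubMultiLock


async def update_under_locks(db, namespace_lock_names: list[str], lock_names: list[str], do_update) -> None:
    # Outer lock: serialize work per IP namespace.
    async with InfrahubMultiLock(lock_registry=lock.registry, locks=namespace_lock_names):
        # Inner lock: per-object locks derived from the preview node (metrics disabled, as in the diff).
        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names, metrics=False):
            # The transaction only opens once both lock sets are held.
            async with db.start_transaction() as dbt:
                await do_update(dbt)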
infrahub/graphql/mutations/main.py
CHANGED

@@ -28,7 +28,7 @@ from infrahub.lock import InfrahubMultiLock
 from infrahub.log import get_log_data, get_logger
 from infrahub.profiles.node_applier import NodeProfilesApplier
 
-from ...core.node.lock_utils import
+from ...core.node.lock_utils import get_lock_names_on_object_mutation
 from .node_getter.by_default_filter import MutationNodeGetterByDefaultFilter
 
 if TYPE_CHECKING:
@@ -180,41 +180,40 @@ class InfrahubMutationMixin:
         Wrapper around mutate_update to potentially activate locking and call it within a database transaction.
         """
 
-
-
-
+        # Prepare a clone to compute locks without triggering pool allocations
+        preview_obj = await NodeManager.get_one_by_id_or_default_filter(
+            db=db,
+            kind=obj.get_kind(),
+            id=obj.get_id(),
+            branch=branch,
         )
+        await preview_obj.from_graphql(db=db, data=data, process_pools=False)
 
-
-
-
-
-
-
-        else:
+        schema_branch = db.schema.get_schema_branch(name=branch.name)
+        lock_names = get_lock_names_on_object_mutation(node=preview_obj, schema_branch=schema_branch)
+
+        # FIXME: do not lock when data does not contain uniqueness constraint fields or resource pool allocations
+        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names, metrics=False):
+            if db.is_transaction:
                 obj = await cls.mutate_update_object(
                     db=db, info=info, data=data, branch=branch, obj=obj, skip_uniqueness_check=skip_uniqueness_check
                 )
-                result = await cls.mutate_update_to_graphql(db=db, info=info, obj=obj)
-                return obj, result
 
-
-
-
-
-            db=dbt,
-            info=info,
-            data=data,
-            branch=branch,
-            obj=obj,
-            skip_uniqueness_check=skip_uniqueness_check,
-        )
-        else:
+                result = await cls.mutate_update_to_graphql(db=db, info=info, obj=obj)
+                return obj, result
+
+            async with db.start_transaction() as dbt:
                 obj = await cls.mutate_update_object(
-            db=dbt,
+                    db=dbt,
+                    info=info,
+                    data=data,
+                    branch=branch,
+                    obj=obj,
+                    skip_uniqueness_check=skip_uniqueness_check,
                 )
-
-
+
+                result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=obj)
+                return obj, result
 
     @classmethod
     @retry_db_transaction(name="object_update")
infrahub/graphql/mutations/relationship.py
CHANGED

@@ -91,7 +91,7 @@ class RelationshipAdd(Mutation):
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         rel_schema = source.get_schema().get_relationship(name=relationship_name)
-        display_label: str = await source.
+        display_label: str = await source.get_display_label(db=graphql_context.db) or ""
         node_changelog = NodeChangelog(
             node_id=source.get_id(), node_kind=source.get_kind(), display_label=display_label
         )
@@ -214,7 +214,7 @@ class RelationshipRemove(Mutation):
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         rel_schema = source.get_schema().get_relationship(name=relationship_name)
-        display_label: str = await source.
+        display_label: str = await source.get_display_label(db=graphql_context.db) or ""
         node_changelog = NodeChangelog(
             node_id=source.get_id(), node_kind=source.get_kind(), display_label=display_label
         )
infrahub/graphql/mutations/resource_manager.py
CHANGED

@@ -98,7 +98,7 @@ class IPPrefixPoolGetResource(Mutation):
                 "id": resource.id,
                 "kind": resource.get_kind(),
                 "identifier": data.get("identifier", None),
-                "display_label": await resource.
+                "display_label": await resource.get_display_label(db=graphql_context.db),
                 "branch": graphql_context.branch.name,
             },
         }
@@ -144,7 +144,7 @@ class IPAddressPoolGetResource(Mutation):
                 "id": resource.id,
                 "kind": resource.get_kind(),
                 "identifier": data.get("identifier"),
-                "display_label": await resource.
+                "display_label": await resource.get_display_label(db=graphql_context.db),
                 "branch": graphql_context.branch.name,
             },
         }