infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. infrahub/actions/tasks.py +8 -0
  2. infrahub/api/diff/diff.py +1 -1
  3. infrahub/cli/db.py +24 -0
  4. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  5. infrahub/core/attribute.py +3 -3
  6. infrahub/core/branch/tasks.py +2 -1
  7. infrahub/core/changelog/models.py +4 -12
  8. infrahub/core/constants/infrahubkind.py +1 -0
  9. infrahub/core/diff/model/path.py +4 -0
  10. infrahub/core/diff/payload_builder.py +1 -1
  11. infrahub/core/graph/__init__.py +1 -1
  12. infrahub/core/ipam/utilization.py +1 -1
  13. infrahub/core/manager.py +6 -3
  14. infrahub/core/migrations/graph/__init__.py +4 -0
  15. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
  16. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
  17. infrahub/core/migrations/schema/node_attribute_add.py +5 -2
  18. infrahub/core/migrations/shared.py +5 -6
  19. infrahub/core/node/__init__.py +142 -40
  20. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  21. infrahub/core/node/node_property_attribute.py +230 -0
  22. infrahub/core/node/standard.py +1 -1
  23. infrahub/core/protocols.py +7 -1
  24. infrahub/core/query/node.py +14 -1
  25. infrahub/core/registry.py +2 -2
  26. infrahub/core/relationship/constraints/count.py +1 -1
  27. infrahub/core/relationship/model.py +1 -1
  28. infrahub/core/schema/basenode_schema.py +42 -2
  29. infrahub/core/schema/definitions/core/__init__.py +2 -0
  30. infrahub/core/schema/definitions/core/generator.py +2 -0
  31. infrahub/core/schema/definitions/core/group.py +16 -2
  32. infrahub/core/schema/definitions/internal.py +14 -1
  33. infrahub/core/schema/generated/base_node_schema.py +6 -1
  34. infrahub/core/schema/node_schema.py +5 -2
  35. infrahub/core/schema/schema_branch.py +134 -0
  36. infrahub/core/schema/schema_branch_display.py +123 -0
  37. infrahub/core/schema/schema_branch_hfid.py +114 -0
  38. infrahub/core/validators/aggregated_checker.py +1 -1
  39. infrahub/core/validators/determiner.py +12 -1
  40. infrahub/core/validators/relationship/peer.py +1 -1
  41. infrahub/core/validators/tasks.py +1 -1
  42. infrahub/display_labels/__init__.py +0 -0
  43. infrahub/display_labels/gather.py +48 -0
  44. infrahub/display_labels/models.py +240 -0
  45. infrahub/display_labels/tasks.py +186 -0
  46. infrahub/display_labels/triggers.py +22 -0
  47. infrahub/events/group_action.py +1 -1
  48. infrahub/events/node_action.py +1 -1
  49. infrahub/generators/constants.py +7 -0
  50. infrahub/generators/models.py +7 -0
  51. infrahub/generators/tasks.py +31 -15
  52. infrahub/git/integrator.py +22 -14
  53. infrahub/graphql/analyzer.py +1 -1
  54. infrahub/graphql/mutations/display_label.py +111 -0
  55. infrahub/graphql/mutations/generator.py +25 -7
  56. infrahub/graphql/mutations/hfid.py +118 -0
  57. infrahub/graphql/mutations/relationship.py +2 -2
  58. infrahub/graphql/mutations/resource_manager.py +2 -2
  59. infrahub/graphql/mutations/schema.py +5 -5
  60. infrahub/graphql/queries/resource_manager.py +1 -1
  61. infrahub/graphql/resolvers/resolver.py +2 -0
  62. infrahub/graphql/schema.py +4 -0
  63. infrahub/groups/tasks.py +1 -1
  64. infrahub/hfid/__init__.py +0 -0
  65. infrahub/hfid/gather.py +48 -0
  66. infrahub/hfid/models.py +240 -0
  67. infrahub/hfid/tasks.py +185 -0
  68. infrahub/hfid/triggers.py +22 -0
  69. infrahub/lock.py +15 -4
  70. infrahub/middleware.py +26 -1
  71. infrahub/proposed_change/tasks.py +10 -1
  72. infrahub/server.py +16 -3
  73. infrahub/services/__init__.py +8 -5
  74. infrahub/trigger/catalogue.py +4 -0
  75. infrahub/trigger/models.py +2 -0
  76. infrahub/trigger/tasks.py +3 -0
  77. infrahub/workflows/catalogue.py +72 -0
  78. infrahub/workflows/initialization.py +16 -0
  79. infrahub_sdk/checks.py +1 -1
  80. infrahub_sdk/ctl/cli_commands.py +2 -0
  81. infrahub_sdk/ctl/generator.py +4 -0
  82. infrahub_sdk/ctl/graphql.py +184 -0
  83. infrahub_sdk/ctl/schema.py +6 -2
  84. infrahub_sdk/generator.py +7 -1
  85. infrahub_sdk/graphql/__init__.py +12 -0
  86. infrahub_sdk/graphql/constants.py +1 -0
  87. infrahub_sdk/graphql/plugin.py +85 -0
  88. infrahub_sdk/graphql/query.py +77 -0
  89. infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
  90. infrahub_sdk/graphql/utils.py +40 -0
  91. infrahub_sdk/protocols.py +14 -0
  92. infrahub_sdk/schema/__init__.py +38 -0
  93. infrahub_sdk/schema/repository.py +8 -0
  94. infrahub_sdk/spec/object.py +84 -10
  95. infrahub_sdk/spec/range_expansion.py +1 -1
  96. infrahub_sdk/transforms.py +1 -1
  97. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +5 -4
  98. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +104 -79
  99. infrahub_testcontainers/container.py +1 -1
  100. infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
  101. infrahub_testcontainers/docker-compose.test.yml +1 -1
  102. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
  103. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
  104. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
@@ -24,7 +24,7 @@ class NodeMutatedEvent(InfrahubEvent):
 
     def get_related(self) -> list[dict[str, str]]:
         related = super().get_related()
-        if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
+        if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GENERATORAWAREGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
             # Temporary workaround to avoid too large payloads for the related field
             return related
 
@@ -0,0 +1,7 @@
+from enum import Enum
+
+
+class GeneratorDefinitionRunSource(Enum):
+    PROPOSED_CHANGE = "proposed_change"
+    MERGE = "merge"
+    UNKNOWN = "unknown"
@@ -44,6 +44,11 @@ class GeneratorDefinitionModel(BaseModel):
     group_id: str = Field(..., description="The group to target when running this generator")
     parameters: dict = Field(..., description="The input parameters required to run this check")
 
+    execute_in_proposed_change: bool = Field(
+        ..., description="Indicates if the generator should execute in a proposed change."
+    )
+    execute_after_merge: bool = Field(..., description="Indicates if the generator should execute after a merge.")
+
     @classmethod
     def from_pc_generator_definition(cls, model: ProposedChangeGeneratorDefinition) -> GeneratorDefinitionModel:
         return GeneratorDefinitionModel(
@@ -55,6 +60,8 @@ class GeneratorDefinitionModel(BaseModel):
             file_path=model.file_path,
             group_id=model.group_id,
             parameters=model.parameters,
+            execute_in_proposed_change=model.execute_in_proposed_change,
+            execute_after_merge=model.execute_after_merge,
         )
 
 
@@ -14,6 +14,7 @@ from prefect.states import Completed, Failed
 from infrahub import lock
 from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
 from infrahub.core.constants import GeneratorInstanceStatus, InfrahubKind
+from infrahub.generators.constants import GeneratorDefinitionRunSource
 from infrahub.generators.models import (
     GeneratorDefinitionModel,
     ProposedChangeGeneratorDefinition,
@@ -57,6 +58,8 @@ async def run_generator(model: RequestGeneratorRun) -> None:
         query=model.generator_definition.query_name,
         targets=model.generator_definition.group_id,
         convert_query_response=model.generator_definition.convert_query_response,
+        execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+        execute_after_merge=model.generator_definition.execute_after_merge,
     )
 
     commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -80,6 +83,8 @@ async def run_generator(model: RequestGeneratorRun) -> None:
         params=model.variables,
         generator_instance=generator_instance.id,
         convert_query_response=generator_definition.convert_query_response,
+        execute_in_proposed_change=generator_definition.execute_in_proposed_change,
+        execute_after_merge=generator_definition.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     await generator.run(identifier=generator_definition.name)
@@ -129,28 +134,39 @@ async def _define_instance(model: RequestGeneratorRun, client: InfrahubClient) -
 
 
 @flow(name="generator-definition-run", flow_run_name="Run all generators")
-async def run_generator_definition(branch: str, context: InfrahubContext) -> None:
+async def run_generator_definition(
+    branch: str, context: InfrahubContext, source: GeneratorDefinitionRunSource = GeneratorDefinitionRunSource.UNKNOWN
+) -> None:
     await add_tags(branches=[branch])
 
     generators = await get_client().filters(
         kind=InfrahubKind.GENERATORDEFINITION, prefetch_relationships=True, populate_store=True, branch=branch
     )
 
-    generator_definitions = [
-        ProposedChangeGeneratorDefinition(
-            definition_id=generator.id,
-            definition_name=generator.name.value,
-            class_name=generator.class_name.value,
-            file_path=generator.file_path.value,
-            query_name=generator.query.peer.name.value,
-            query_models=generator.query.peer.models.value,
-            repository_id=generator.repository.peer.id,
-            parameters=generator.parameters.value,
-            group_id=generator.targets.peer.id,
-            convert_query_response=generator.convert_query_response.value,
+    generator_definitions: list[ProposedChangeGeneratorDefinition] = []
+
+    for generator in generators:
+        if (
+            source == GeneratorDefinitionRunSource.PROPOSED_CHANGE and not generator.execute_in_proposed_change.value
+        ) or (source == GeneratorDefinitionRunSource.MERGE and not generator.execute_after_merge.value):
+            continue
+
+        generator_definitions.append(
+            ProposedChangeGeneratorDefinition(
+                definition_id=generator.id,
+                definition_name=generator.name.value,
+                class_name=generator.class_name.value,
+                file_path=generator.file_path.value,
+                query_name=generator.query.peer.name.value,
+                query_models=generator.query.peer.models.value,
+                repository_id=generator.repository.peer.id,
+                parameters=generator.parameters.value,
+                group_id=generator.targets.peer.id,
+                convert_query_response=generator.convert_query_response.value,
+                execute_in_proposed_change=generator.execute_in_proposed_change.value,
+                execute_after_merge=generator.execute_after_merge.value,
+            )
         )
-        for generator in generators
-    ]
 
     for generator_definition in generator_definitions:
         model = RequestGeneratorDefinitionRun(branch=branch, generator_definition=generator_definition)
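
The filter added above only skips a definition when the run source explicitly opts it out: a PROPOSED_CHANGE run honours execute_in_proposed_change, a MERGE run honours execute_after_merge, and the default UNKNOWN source keeps the previous run-everything behaviour. A minimal standalone sketch of that decision (the should_run helper below is illustrative only, not part of the package):

from enum import Enum

class GeneratorDefinitionRunSource(Enum):  # as added in infrahub/generators/constants.py
    PROPOSED_CHANGE = "proposed_change"
    MERGE = "merge"
    UNKNOWN = "unknown"

def should_run(source: GeneratorDefinitionRunSource, execute_in_proposed_change: bool, execute_after_merge: bool) -> bool:
    # Mirrors the `continue` condition in run_generator_definition above.
    if source is GeneratorDefinitionRunSource.PROPOSED_CHANGE and not execute_in_proposed_change:
        return False
    if source is GeneratorDefinitionRunSource.MERGE and not execute_after_merge:
        return False
    return True  # UNKNOWN never filters a definition out

# A definition flagged execute_after_merge=False still runs in a proposed change, but not after a merge:
assert should_run(GeneratorDefinitionRunSource.PROPOSED_CHANGE, True, False) is True
assert should_run(GeneratorDefinitionRunSource.MERGE, True, False) is False
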
@@ -226,7 +226,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             )
         )
 
-    @task(name="import-jinja2-tansforms", task_run_name="Import Jinja2 transform", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-jinja2-transforms", task_run_name="Import Jinja2 transform", cache_policy=NONE)
     async def import_jinja2_transforms(
         self,
         branch_name: str,
@@ -331,7 +331,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
 
         await existing_transform.save()
 
-    @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)
     async def import_artifact_definitions(
         self,
         branch_name: str,
@@ -432,7 +432,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
 
         await existing_artifact_definition.save()
 
-    @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)
     async def get_repository_config(self, branch_name: str, commit: str) -> InfrahubRepositoryConfig | None:
         branch_wt = self.get_worktree(identifier=commit or branch_name)
         log = get_run_logger()
@@ -469,7 +469,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             log.error(f"Unable to load the configuration file {config_file_name}, the format is not valid : {exc}")
             return None
 
-    @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)
     async def import_schema_files(self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig) -> None:
         log = get_run_logger()
         branch_wt = self.get_worktree(identifier=commit or branch_name)
@@ -541,7 +541,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         for schema_file in schemas_data:
             log.info(f"schema '{schema_file.identifier}' loaded successfully!")
 
-    @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)
     async def import_all_graphql_query(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -599,7 +599,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             await obj.save()
         return obj
 
-    @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)
     async def import_python_check_definitions(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -670,7 +670,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                 log.info(f"CheckDefinition '{check_name!r}' not found locally, deleting")
                 await check_definition_in_graph[check_name].delete()
 
-    @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)
     async def import_generator_definitions(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -756,11 +756,13 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             or existing_generator.parameters.value != generator.parameters
             or existing_generator.convert_query_response.value != generator.convert_query_response
             or existing_generator.targets.id != generator.targets
+            or existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change
+            or existing_generator.execute_after_merge.value != generator.execute_after_merge
         ):
             return True
         return False
 
-    @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)
     async def import_python_transforms(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -885,7 +887,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             file_type=file_type,
         )
 
-    @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)
     async def import_objects(
         self,
         branch_name: str,
@@ -905,7 +907,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             object_type=RepositoryObjects.MENU,
         )
 
-    @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)
     async def get_check_definition(
         self,
         branch_name: str,
@@ -945,7 +947,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             raise
         return checks
 
-    @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)
     async def get_python_transforms(
         self, branch_name: str, module: types.ModuleType, file_path: str, transform: InfrahubPythonTransformConfig
     ) -> list[TransformPythonInformation]:
@@ -1023,6 +1025,12 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         if existing_generator.targets.id != generator.targets:
             existing_generator.targets = {"id": generator.targets, "source": str(self.id), "is_protected": True}
 
+        if existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change:
+            existing_generator.execute_in_proposed_change.value = generator.execute_in_proposed_change
+
+        if existing_generator.execute_after_merge.value != generator.execute_after_merge:
+            existing_generator.execute_after_merge.value = generator.execute_after_merge
+
         await existing_generator.save()
 
     async def create_python_check_definition(
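
On the repository side, the two flags are read from the generator definition entry in the repository configuration and written back to the existing generator definition node when they drift, as the hunks above show; the matching SDK model change is presumably the infrahub_sdk/schema/repository.py entry in the file list. A hedged sketch of the shape the integrator compares against, written as a plain Python dict; only the keys that appear in the hunks above come from the diff, the remaining keys and all values are assumptions:

generator_definition = {
    "name": "example_generator",            # assumed
    "file_path": "generators/example.py",   # assumed
    "query": "example_query",               # assumed
    "targets": "example_group",             # compared via generator.targets above
    "parameters": {"name": "name__value"},  # compared via generator.parameters above
    "convert_query_response": False,
    "execute_in_proposed_change": True,     # new in 1.5.0b1
    "execute_after_merge": False,           # new in 1.5.0b1
}
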
@@ -1152,7 +1160,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         await self.import_python_transforms(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]
         await self.import_generator_definitions(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]
 
-    @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)
     async def render_jinja2_template(self, commit: str, location: str, data: dict) -> str:
         log = get_run_logger()
         commit_worktree = self.get_commit_worktree(commit=commit)
@@ -1168,7 +1176,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                 repository_name=self.name, commit=commit, location=location, message=exc.message
             ) from exc
 
-    @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)
     async def execute_python_check(
         self,
         branch_name: str,
@@ -1227,7 +1235,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                 repository_name=self.name, class_name=class_name, commit=commit, location=location, message=str(exc)
             ) from exc
 
-    @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)  # type: ignore[arg-type]
+    @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)
     async def execute_python_transform(
         self,
         branch_name: str,
@@ -639,7 +639,7 @@ class InfrahubGraphQLQueryAnalyzer(GraphQLQueryAnalyzer):
         self, node: InlineFragmentNode, query_node: GraphQLQueryNode
     ) -> GraphQLQueryNode:
         context_type = query_node.context_type
-        infrahub_model = self.schema_branch.get(name=node.type_condition.name.value)
+        infrahub_model = self.schema_branch.get(name=node.type_condition.name.value, duplicate=False)
         context_type = ContextType.DIRECT
         current_node = GraphQLQueryNode(
             parent=query_node,
@@ -0,0 +1,111 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from graphene import Boolean, InputObjectType, Mutation, String
+
+from infrahub.core.account import ObjectPermission
+from infrahub.core.constants import PermissionAction, PermissionDecision
+from infrahub.core.manager import NodeManager
+from infrahub.core.registry import registry
+from infrahub.database import retry_db_transaction
+from infrahub.events import EventMeta
+from infrahub.events.node_action import NodeUpdatedEvent
+from infrahub.exceptions import NodeNotFoundError, ValidationError
+from infrahub.graphql.context import apply_external_context
+from infrahub.graphql.types.context import ContextInput
+from infrahub.log import get_log_data
+from infrahub.worker import WORKER_IDENTITY
+
+if TYPE_CHECKING:
+    from graphql import GraphQLResolveInfo
+
+    from infrahub.graphql.initialization import GraphqlContext
+
+
+class InfrahubDisplayLabelUpdateInput(InputObjectType):
+    id = String(required=True)
+    kind = String(required=True)
+    value = String(required=True)
+
+
+class UpdateDisplayLabel(Mutation):
+    class Arguments:
+        data = InfrahubDisplayLabelUpdateInput(required=True)
+        context = ContextInput(required=False)
+
+    ok = Boolean()
+
+    @classmethod
+    @retry_db_transaction(name="update_display_label")
+    async def mutate(
+        cls,
+        _: dict,
+        info: GraphQLResolveInfo,
+        data: InfrahubDisplayLabelUpdateInput,
+        context: ContextInput | None = None,
+    ) -> UpdateDisplayLabel:
+        graphql_context: GraphqlContext = info.context
+        node_schema = registry.schema.get_node_schema(
+            name=str(data.kind), branch=graphql_context.branch.name, duplicate=False
+        )
+        if not node_schema.display_label:
+            raise ValidationError(input_value=f"{node_schema.kind}.display_label has not been defined for this kind.")
+
+        graphql_context.active_permissions.raise_for_permission(
+            permission=ObjectPermission(
+                namespace=node_schema.namespace,
+                name=node_schema.name,
+                action=PermissionAction.UPDATE.value,
+                decision=PermissionDecision.ALLOW_DEFAULT.value
+                if graphql_context.branch.name == registry.default_branch
+                else PermissionDecision.ALLOW_OTHER.value,
+            )
+        )
+        await apply_external_context(graphql_context=graphql_context, context_input=context)
+
+        if not (
+            target_node := await NodeManager.get_one(
+                db=graphql_context.db,
+                kind=node_schema.kind,
+                id=str(data.id),
+                branch=graphql_context.branch,
+                fields={"display_label": None},
+            )
+        ):
+            raise NodeNotFoundError(
+                node_type=node_schema.kind,
+                identifier=str(data.id),
+                message="The targeted node was not found in the database",
+            )
+
+        existing_label = (
+            await target_node.get_display_label(db=graphql_context.db) if target_node.has_display_label() else None
+        )
+        if str(data.value) != existing_label:
+            await target_node.set_display_label(value=str(data.value))
+
+        async with graphql_context.db.start_transaction() as dbt:
+            await target_node.save(db=dbt, fields=["display_label"])
+
+        log_data = get_log_data()
+        request_id = log_data.get("request_id", "")
+
+        event = NodeUpdatedEvent(
+            kind=node_schema.kind,
+            node_id=target_node.get_id(),
+            changelog=target_node.node_changelog.model_dump(),
+            fields=["display_label"],
+            meta=EventMeta(
+                context=graphql_context.get_context(),
+                initiator_id=WORKER_IDENTITY,
+                request_id=request_id,
+                account_id=graphql_context.active_account_session.account_id,
+                branch=graphql_context.branch,
+            ),
+        )
+        await graphql_context.active_service.event.send(event=event)
+
+        result: dict[str, Any] = {"ok": True}
+
+        return cls(**result)
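
UpdateDisplayLabel takes the node id, its kind, and the new label, and returns ok. A hedged invocation sketch through the SDK's raw GraphQL execution; the schema field name InfrahubUpdateDisplayLabel and the example kind are assumptions (the actual field is registered by the infrahub/graphql/schema.py change listed above, which this diff view does not expand):

from infrahub_sdk import InfrahubClientSync

# Mutation field name is assumed; check the field registered in infrahub/graphql/schema.py.
MUTATION = """
mutation UpdateDisplayLabel($id: String!, $kind: String!, $value: String!) {
  InfrahubUpdateDisplayLabel(data: {id: $id, kind: $kind, value: $value}) {
    ok
  }
}
"""

client = InfrahubClientSync(address="http://localhost:8000")
result = client.execute_graphql(
    query=MUTATION,
    variables={"id": "c6b9e9a0-0000-0000-0000-000000000000", "kind": "TestPerson", "value": "updated label"},
)
print(result)
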
@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING
 from graphene import Boolean, Field, InputField, InputObjectType, List, Mutation, NonNull, String
 
 from infrahub.core.manager import NodeManager
+from infrahub.core.protocols import CoreGeneratorDefinition, CoreGenericRepository, CoreGraphQLQuery, CoreStandardGroup
 from infrahub.generators.models import ProposedChangeGeneratorDefinition, RequestGeneratorDefinitionRun
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.types.context import ContextInput
@@ -44,11 +45,18 @@ class GeneratorDefinitionRequestRun(Mutation):
         db = graphql_context.db
         await apply_external_context(graphql_context=graphql_context, context_input=context)
         generator_definition = await NodeManager.get_one(
-            id=str(data.id), db=db, branch=graphql_context.branch, prefetch_relationships=True, raise_on_error=True
+            id=str(data.id),
+            kind=CoreGeneratorDefinition,
+            db=db,
+            branch=graphql_context.branch,
+            prefetch_relationships=True,
+            raise_on_error=True,
         )
-        query = await generator_definition.query.get_peer(db=db)
-        repository = await generator_definition.repository.get_peer(db=db)
-        group = await generator_definition.targets.get_peer(db=db)
+        query = await generator_definition.query.get_peer(db=db, peer_type=CoreGraphQLQuery, raise_on_error=True)
+        repository = await generator_definition.repository.get_peer(
+            db=db, peer_type=CoreGenericRepository, raise_on_error=True
+        )
+        group = await generator_definition.targets.get_peer(db=db, peer_type=CoreStandardGroup, raise_on_error=True)
 
         request_model = RequestGeneratorDefinitionRun(
             generator_definition=ProposedChangeGeneratorDefinition(
@@ -57,11 +65,21 @@ class GeneratorDefinitionRequestRun(Mutation):
                 class_name=generator_definition.class_name.value,
                 file_path=generator_definition.file_path.value,
                 query_name=query.name.value,
-                query_models=query.models.value,
+                query_models=query.models.value or [],
                 repository_id=repository.id,
-                parameters=generator_definition.parameters.value,
+                parameters=generator_definition.parameters.value
+                if isinstance(generator_definition.parameters.value, dict)
+                else {},
                 group_id=group.id,
-                convert_query_response=generator_definition.convert_query_response.value or False,
+                convert_query_response=generator_definition.convert_query_response.value
+                if generator_definition.convert_query_response.value is not None
+                else False,
+                execute_in_proposed_change=generator_definition.execute_in_proposed_change.value
+                if generator_definition.execute_in_proposed_change.value is not None
+                else True,
+                execute_after_merge=generator_definition.execute_after_merge.value
+                if generator_definition.execute_after_merge.value is not None
+                else True,
             ),
             branch=graphql_context.branch.name,
             target_members=data.get("nodes", []),
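
The switch from "value or False" to explicit "is not None" checks is presumably what makes defaults of True workable here: with the "or" pattern an explicitly stored False can never survive, and "value or True" would always evaluate to True. A small plain-Python illustration (not Infrahub code):

def with_or(value):
    return value or True  # an explicit False is silently promoted to True

def with_none_check(value):
    return value if value is not None else True  # pattern used in the hunk above

assert with_or(False) is True            # the stored opt-out is lost
assert with_none_check(False) is False   # the explicit opt-out is preserved
assert with_none_check(None) is True     # only a missing value falls back to the default
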
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, cast
+
+from graphene import Boolean, InputObjectType, List, Mutation, NonNull, String
+
+from infrahub.core.account import ObjectPermission
+from infrahub.core.constants import PermissionAction, PermissionDecision
+from infrahub.core.manager import NodeManager
+from infrahub.core.registry import registry
+from infrahub.database import retry_db_transaction
+from infrahub.events import EventMeta
+from infrahub.events.node_action import NodeUpdatedEvent
+from infrahub.exceptions import NodeNotFoundError, ValidationError
+from infrahub.graphql.context import apply_external_context
+from infrahub.graphql.types.context import ContextInput
+from infrahub.log import get_log_data
+from infrahub.worker import WORKER_IDENTITY
+
+if TYPE_CHECKING:
+    from graphql import GraphQLResolveInfo
+
+    from infrahub.graphql.initialization import GraphqlContext
+
+
+class InfrahubHFIDUpdateInput(InputObjectType):
+    id = String(required=True)
+    kind = String(required=True)
+    value = List(NonNull(String), required=True)
+
+
+class UpdateHFID(Mutation):
+    class Arguments:
+        data = InfrahubHFIDUpdateInput(required=True)
+        context = ContextInput(required=False)
+
+    ok = Boolean()
+
+    @classmethod
+    @retry_db_transaction(name="update_hfid")
+    async def mutate(
+        cls,
+        _: dict,
+        info: GraphQLResolveInfo,
+        data: InfrahubHFIDUpdateInput,
+        context: ContextInput | None = None,
+    ) -> UpdateHFID:
+        graphql_context: GraphqlContext = info.context
+        node_schema = registry.schema.get_node_schema(
+            name=str(data.kind), branch=graphql_context.branch.name, duplicate=False
+        )
+        if node_schema.human_friendly_id is None:
+            raise ValidationError(
+                input_value=f"{node_schema.kind}.human_friendly_id has not been defined for this kind."
+            )
+
+        updated_hfid = cast(list[str], data.value)
+
+        if len(node_schema.human_friendly_id) != len(updated_hfid):
+            raise ValidationError(
+                input_value=f"{node_schema.kind}.human_friendly_id requires {len(node_schema.human_friendly_id)} parts data has {len(updated_hfid)}"
+            )
+
+        graphql_context.active_permissions.raise_for_permission(
+            permission=ObjectPermission(
+                namespace=node_schema.namespace,
+                name=node_schema.name,
+                action=PermissionAction.UPDATE.value,
+                decision=PermissionDecision.ALLOW_DEFAULT.value
+                if graphql_context.branch.name == registry.default_branch
+                else PermissionDecision.ALLOW_OTHER.value,
+            )
+        )
+        await apply_external_context(graphql_context=graphql_context, context_input=context)
+
+        if not (
+            target_node := await NodeManager.get_one(
+                db=graphql_context.db,
+                kind=node_schema.kind,
+                id=str(data.id),
+                branch=graphql_context.branch,
+                fields={"human_friendly_id": None},
+            )
+        ):
+            raise NodeNotFoundError(
+                node_type=node_schema.kind,
+                identifier=str(data.id),
+                message="The targeted node was not found in the database",
+            )
+
+        existing = await target_node.get_hfid(db=graphql_context.db) if target_node.has_human_friendly_id() else None
+        if updated_hfid != existing:
+            await target_node.set_human_friendly_id(value=updated_hfid)
+
+        async with graphql_context.db.start_transaction() as dbt:
+            await target_node.save(db=dbt, fields=["human_friendly_id"])
+
+        log_data = get_log_data()
+        request_id = log_data.get("request_id", "")
+
+        event = NodeUpdatedEvent(
+            kind=node_schema.kind,
+            node_id=target_node.get_id(),
+            changelog=target_node.node_changelog.model_dump(),
+            fields=["human_friendly_id"],
+            meta=EventMeta(
+                context=graphql_context.get_context(),
+                initiator_id=WORKER_IDENTITY,
+                request_id=request_id,
+                account_id=graphql_context.active_account_session.account_id,
+                branch=graphql_context.branch,
+            ),
+        )
+        await graphql_context.active_service.event.send(event=event)
+
+        result: dict[str, Any] = {"ok": True}
+
+        return cls(**result)
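
Before writing anything, UpdateHFID checks that the submitted value has exactly one element per component of the schema's human_friendly_id. A short sketch of that rule with a made-up schema (the components and the example value below are hypothetical):

# Hypothetical schema definition: human_friendly_id = ["name__value", "location__name__value"]
human_friendly_id = ["name__value", "location__name__value"]
updated_hfid = ["atl1-edge1"]  # only one part supplied

if len(human_friendly_id) != len(updated_hfid):
    # Mirrors the ValidationError raised by the mutation above.
    raise ValueError(f"human_friendly_id requires {len(human_friendly_id)} parts, data has {len(updated_hfid)}")
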
@@ -91,7 +91,7 @@ class RelationshipAdd(Mutation):
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         rel_schema = source.get_schema().get_relationship(name=relationship_name)
-        display_label: str = await source.render_display_label(db=graphql_context.db)
+        display_label: str = await source.get_display_label(db=graphql_context.db) or ""
         node_changelog = NodeChangelog(
             node_id=source.get_id(), node_kind=source.get_kind(), display_label=display_label
         )
@@ -214,7 +214,7 @@ class RelationshipRemove(Mutation):
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         rel_schema = source.get_schema().get_relationship(name=relationship_name)
-        display_label: str = await source.render_display_label(db=graphql_context.db)
+        display_label: str = await source.get_display_label(db=graphql_context.db) or ""
         node_changelog = NodeChangelog(
             node_id=source.get_id(), node_kind=source.get_kind(), display_label=display_label
         )
@@ -98,7 +98,7 @@ class IPPrefixPoolGetResource(Mutation):
                 "id": resource.id,
                 "kind": resource.get_kind(),
                 "identifier": data.get("identifier", None),
-                "display_label": await resource.render_display_label(db=graphql_context.db),
+                "display_label": await resource.get_display_label(db=graphql_context.db),
                 "branch": graphql_context.branch.name,
             },
         }
@@ -144,7 +144,7 @@ class IPAddressPoolGetResource(Mutation):
                 "id": resource.id,
                 "kind": resource.get_kind(),
                 "identifier": data.get("identifier"),
-                "display_label": await resource.render_display_label(db=graphql_context.db),
+                "display_label": await resource.get_display_label(db=graphql_context.db),
                 "branch": graphql_context.branch.name,
             },
         }
@@ -81,7 +81,7 @@ class SchemaDropdownAdd(Mutation):
         _validate_schema_permission(graphql_context=graphql_context)
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name)
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=False)
         attribute = str(data.attribute)
         validate_kind_dropdown(kind=kind, attribute=attribute)
         dropdown = str(data.dropdown)
@@ -104,7 +104,7 @@ class SchemaDropdownAdd(Mutation):
             context=graphql_context.get_context(),
         )
 
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name)
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=False)
         attrib = kind.get_attribute(attribute)
         dropdown_entry = {}
         success = False
@@ -141,7 +141,7 @@ class SchemaDropdownRemove(Mutation):
         graphql_context: GraphqlContext = info.context
 
         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name)
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=False)
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         attribute = str(data.attribute)
@@ -197,7 +197,7 @@ class SchemaEnumAdd(Mutation):
         graphql_context: GraphqlContext = info.context
 
         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name)
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=False)
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         attribute = str(data.attribute)
@@ -243,7 +243,7 @@ class SchemaEnumRemove(Mutation):
         graphql_context: GraphqlContext = info.context
 
         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name)
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=False)
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
         attribute = str(data.attribute)
@@ -242,7 +242,7 @@ class PoolUtilization(ObjectType):
         if "kind" in node_fields:
             node_response["kind"] = resource_node.get_kind()
         if "display_label" in node_fields:
-            node_response["display_label"] = await resource_node.render_display_label(db=db)
+            node_response["display_label"] = await resource_node.get_display_label(db=db)
         if "weight" in node_fields:
             node_response["weight"] = await resource_node.get_resource_weight(db=db)  # type: ignore[attr-defined]
         if "utilization" in node_fields:
@@ -166,6 +166,8 @@ async def default_paginated_list_resolver(
 
     edges = fields.get("edges", {})
     node_fields = edges.get("node", {})
+    if "hfid" in node_fields:
+        node_fields["human_friendly_id"] = None
 
     permission_set: dict[str, Any] | None = None
     permissions = (
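
Selecting hfid on a paginated list now also pulls human_friendly_id into the fields fetched from the database. A minimal illustration of the dict manipulation above:

node_fields = {"id": None, "hfid": None}
if "hfid" in node_fields:
    node_fields["human_friendly_id"] = None
assert node_fields == {"id": None, "hfid": None, "human_friendly_id": None}
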