infrahub-server 1.5.0b1__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. infrahub/api/internal.py +2 -0
  2. infrahub/api/oauth2.py +13 -19
  3. infrahub/api/oidc.py +15 -21
  4. infrahub/api/schema.py +24 -3
  5. infrahub/artifacts/models.py +2 -1
  6. infrahub/auth.py +137 -3
  7. infrahub/cli/__init__.py +2 -0
  8. infrahub/cli/db.py +83 -102
  9. infrahub/cli/dev.py +118 -0
  10. infrahub/cli/tasks.py +46 -0
  11. infrahub/cli/upgrade.py +30 -3
  12. infrahub/computed_attribute/tasks.py +20 -8
  13. infrahub/core/attribute.py +10 -2
  14. infrahub/core/branch/enums.py +1 -1
  15. infrahub/core/branch/models.py +7 -3
  16. infrahub/core/branch/tasks.py +68 -7
  17. infrahub/core/constants/__init__.py +3 -0
  18. infrahub/core/diff/query/artifact.py +1 -0
  19. infrahub/core/diff/query/field_summary.py +1 -0
  20. infrahub/core/graph/__init__.py +1 -1
  21. infrahub/core/initialization.py +5 -2
  22. infrahub/core/migrations/__init__.py +3 -0
  23. infrahub/core/migrations/exceptions.py +4 -0
  24. infrahub/core/migrations/graph/__init__.py +10 -13
  25. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  26. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  27. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  28. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  29. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  30. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  31. infrahub/core/migrations/query/__init__.py +7 -8
  32. infrahub/core/migrations/query/attribute_add.py +8 -6
  33. infrahub/core/migrations/query/attribute_remove.py +134 -0
  34. infrahub/core/migrations/runner.py +54 -0
  35. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  36. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  37. infrahub/core/migrations/schema/node_attribute_add.py +30 -2
  38. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  39. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  40. infrahub/core/migrations/schema/node_remove.py +2 -1
  41. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  42. infrahub/core/migrations/shared.py +48 -14
  43. infrahub/core/node/__init__.py +16 -11
  44. infrahub/core/node/create.py +46 -63
  45. infrahub/core/node/lock_utils.py +70 -44
  46. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  47. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  48. infrahub/core/node/resource_manager/number_pool.py +2 -1
  49. infrahub/core/query/attribute.py +55 -0
  50. infrahub/core/query/ipam.py +1 -0
  51. infrahub/core/query/node.py +9 -3
  52. infrahub/core/query/relationship.py +1 -0
  53. infrahub/core/schema/__init__.py +56 -0
  54. infrahub/core/schema/attribute_schema.py +4 -0
  55. infrahub/core/schema/definitions/internal.py +2 -2
  56. infrahub/core/schema/generated/attribute_schema.py +2 -2
  57. infrahub/core/schema/manager.py +22 -1
  58. infrahub/core/schema/schema_branch.py +180 -22
  59. infrahub/database/graph.py +21 -0
  60. infrahub/display_labels/tasks.py +13 -7
  61. infrahub/events/branch_action.py +27 -1
  62. infrahub/generators/tasks.py +3 -7
  63. infrahub/git/base.py +4 -1
  64. infrahub/git/integrator.py +1 -1
  65. infrahub/git/models.py +2 -1
  66. infrahub/git/repository.py +22 -5
  67. infrahub/git/tasks.py +66 -10
  68. infrahub/git/utils.py +123 -1
  69. infrahub/graphql/api/endpoints.py +14 -4
  70. infrahub/graphql/manager.py +4 -9
  71. infrahub/graphql/mutations/convert_object_type.py +11 -1
  72. infrahub/graphql/mutations/display_label.py +17 -10
  73. infrahub/graphql/mutations/hfid.py +17 -10
  74. infrahub/graphql/mutations/ipam.py +54 -35
  75. infrahub/graphql/mutations/main.py +27 -28
  76. infrahub/graphql/schema_sort.py +170 -0
  77. infrahub/graphql/types/branch.py +4 -1
  78. infrahub/graphql/types/enums.py +3 -0
  79. infrahub/hfid/tasks.py +13 -7
  80. infrahub/lock.py +52 -12
  81. infrahub/message_bus/types.py +2 -1
  82. infrahub/permissions/constants.py +2 -0
  83. infrahub/proposed_change/tasks.py +25 -16
  84. infrahub/server.py +6 -2
  85. infrahub/services/__init__.py +2 -2
  86. infrahub/services/adapters/http/__init__.py +5 -0
  87. infrahub/services/adapters/workflow/worker.py +14 -3
  88. infrahub/task_manager/event.py +5 -0
  89. infrahub/task_manager/models.py +7 -0
  90. infrahub/task_manager/task.py +73 -0
  91. infrahub/trigger/setup.py +13 -4
  92. infrahub/trigger/tasks.py +3 -0
  93. infrahub/workers/dependencies.py +10 -1
  94. infrahub/workers/infrahub_async.py +10 -2
  95. infrahub/workflows/catalogue.py +8 -0
  96. infrahub/workflows/initialization.py +5 -0
  97. infrahub/workflows/utils.py +2 -1
  98. infrahub_sdk/client.py +13 -10
  99. infrahub_sdk/config.py +29 -2
  100. infrahub_sdk/ctl/schema.py +22 -7
  101. infrahub_sdk/schema/__init__.py +32 -4
  102. infrahub_sdk/spec/models.py +7 -0
  103. infrahub_sdk/spec/object.py +37 -102
  104. infrahub_sdk/spec/processors/__init__.py +0 -0
  105. infrahub_sdk/spec/processors/data_processor.py +10 -0
  106. infrahub_sdk/spec/processors/factory.py +34 -0
  107. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  108. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +3 -1
  109. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +115 -101
  110. infrahub_testcontainers/container.py +114 -2
  111. infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
  112. infrahub_testcontainers/docker-compose.test.yml +5 -0
  113. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  114. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +0 -97
  115. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +0 -86
  116. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  117. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  118. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/git/repository.py CHANGED
@@ -2,6 +2,9 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
+from cachetools import TTLCache
+from cachetools.keys import hashkey
+from cachetools_async import cached
 from git.exc import BadName, GitCommandError
 from infrahub_sdk.exceptions import GraphQLError
 from prefect import task
@@ -248,12 +251,13 @@ class InfrahubReadOnlyRepository(InfrahubRepositoryIntegrator):
         await self.update_commit_value(branch_name=self.infrahub_branch_name, commit=commit)
 
 
-@task(
-    name="Fetch repository commit",
-    description="Retrieve a git repository at a given commit, if it does not already exist locally",
-    cache_policy=NONE,
+@cached(
+    TTLCache(maxsize=100, ttl=30),
+    key=lambda *_, **kwargs: hashkey(
+        kwargs.get("repository_id"), kwargs.get("name"), kwargs.get("repository_kind"), kwargs.get("commit")
+    ),
 )
-async def get_initialized_repo(
+async def _get_initialized_repo(
     client: InfrahubClient, repository_id: str, name: str, repository_kind: str, commit: str | None = None
 ) -> InfrahubReadOnlyRepository | InfrahubRepository:
     if repository_kind == InfrahubKind.REPOSITORY:
@@ -263,3 +267,16 @@ async def get_initialized_repo(
         return await InfrahubReadOnlyRepository.init(id=repository_id, name=name, commit=commit, client=client)
 
     raise NotImplementedError(f"The repository kind {repository_kind} has not been implemented")
+
+
+@task(
+    name="Fetch repository commit",
+    description="Retrieve a git repository at a given commit, if it does not already exist locally",
+    cache_policy=NONE,
+)
+async def get_initialized_repo(
+    client: InfrahubClient, repository_id: str, name: str, repository_kind: str, commit: str | None = None
+) -> InfrahubReadOnlyRepository | InfrahubRepository:
+    return await _get_initialized_repo(
+        client=client, repository_id=repository_id, name=name, repository_kind=repository_kind, commit=commit
+    )
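Note on the repository.py hunks above: the Prefect task is now a thin wrapper around a TTL-cached helper, so repeated calls with the same repository_id/name/repository_kind/commit within 30 seconds reuse the already-initialized repository object. A minimal standalone sketch of the same pattern, assuming the cachetools-async decorator behaves like cachetools.cached (as the diff uses it); the names fetch_remote_state and resource_id are illustrative only:

# Sketch only, not Infrahub code: a TTL-cached async helper behind a stable public function.
import asyncio

from cachetools import TTLCache
from cachetools.keys import hashkey
from cachetools_async import cached


@cached(TTLCache(maxsize=100, ttl=30), key=lambda *_, **kwargs: hashkey(kwargs.get("resource_id")))
async def _fetch_remote_state(*, resource_id: str) -> str:
    # Stand-in for an expensive operation such as cloning or fetching a git repository.
    await asyncio.sleep(0.1)
    return f"state-of-{resource_id}"


async def fetch_remote_state(*, resource_id: str) -> str:
    # Callers keep using the public name; repeated calls within the TTL are served from the cache.
    return await _fetch_remote_state(resource_id=resource_id)


if __name__ == "__main__":
    print(asyncio.run(fetch_remote_state(resource_id="repo-123")))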
infrahub/git/tasks.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Any
+
 from infrahub_sdk import InfrahubClient
 from infrahub_sdk.protocols import (
     CoreArtifact,
@@ -14,7 +16,12 @@ from prefect.logging import get_run_logger
 
 from infrahub import lock
 from infrahub.context import InfrahubContext
-from infrahub.core.constants import InfrahubKind, RepositoryInternalStatus, ValidatorConclusion
+from infrahub.core.constants import (
+    InfrahubKind,
+    RepositoryInternalStatus,
+    RepositoryOperationalStatus,
+    ValidatorConclusion,
+)
 from infrahub.core.manager import NodeManager
 from infrahub.core.registry import registry
 from infrahub.exceptions import CheckError, RepositoryError
@@ -53,6 +60,7 @@ from .models import (
     UserCheckDefinitionData,
 )
 from .repository import InfrahubReadOnlyRepository, InfrahubRepository, get_initialized_repo
+from .utils import fetch_artifact_definition_targets, fetch_check_definition_targets
 
 
 @flow(
@@ -151,6 +159,39 @@ async def create_branch(branch: str, branch_id: str) -> None:
         pass
 
 
+@flow(name="sync-git-repo-with-origin", flow_run_name="Sync git repo with origin")
+async def sync_git_repo_with_origin_and_tag_on_failure(
+    client: InfrahubClient,
+    repository_id: str,
+    repository_name: str,
+    repository_location: str,
+    internal_status: str,
+    default_branch_name: str,
+    operational_status: str,
+    staging_branch: str | None = None,
+    infrahub_branch: str | None = None,
+) -> None:
+    repo = await InfrahubRepository.init(
+        id=repository_id,
+        name=repository_name,
+        location=repository_location,
+        client=client,
+        internal_status=internal_status,
+        default_branch_name=default_branch_name,
+    )
+
+    try:
+        await repo.sync(staging_branch=staging_branch)
+    except RepositoryError:
+        if operational_status == RepositoryOperationalStatus.ONLINE.value:
+            params: dict[str, Any] = {
+                "branches": [infrahub_branch] if infrahub_branch else [],
+                "nodes": [str(repository_id)],
+            }
+            await add_tags(**params)
+        raise
+
+
 @flow(name="git_repositories_sync", flow_run_name="Sync Git Repositories")
 async def sync_remote_repositories() -> None:
     log = get_run_logger()
@@ -203,7 +244,17 @@ async def sync_remote_repositories() -> None:
                 continue
 
             try:
-                await repo.sync(staging_branch=staging_branch)
+                await sync_git_repo_with_origin_and_tag_on_failure(
+                    client=client,
+                    repository_id=repository_data.repository.id,
+                    repository_name=repository_data.repository.name.value,
+                    repository_location=repository_data.repository.location.value,
+                    internal_status=active_internal_status,
+                    default_branch_name=repository_data.repository.default_branch.value,
+                    operational_status=repository_data.repository.operational_status.value,
+                    staging_branch=staging_branch,
+                    infrahub_branch=infrahub_branch,
+                )
                 # Tell workers to fetch to stay in sync
                 message = messages.RefreshGitFetch(
                     meta=Meta(initiator_id=WORKER_IDENTITY, request_id=get_log_data().get("request_id", "")),
@@ -323,9 +374,8 @@ async def generate_request_artifact_definition(
         kind=CoreArtifactDefinition, id=model.artifact_definition_id, branch=model.branch
     )
 
-    await artifact_definition.targets.fetch()
-    group = artifact_definition.targets.peer
-    await group.members.fetch()
+    group = await fetch_artifact_definition_targets(client=client, branch=model.branch, definition=artifact_definition)
+
     current_members = [member.id for member in group.members.peers]
 
     artifacts_by_member = {}
@@ -356,6 +406,7 @@ async def generate_request_artifact_definition(
         transform_location = f"{transform.file_path.value}::{transform.class_name.value}"
         convert_query_response = transform.convert_query_response.value
 
+    batch = await client.create_batch()
     for relationship in group.members.peers:
         member = relationship.peer
         artifact_id = artifacts_by_member.get(member.id)
@@ -376,6 +427,7 @@
             repository_kind=repository.get_kind(),
             branch_name=model.branch,
             query=query.name.value,
+            query_id=query.id,
             variables=await member.extract(params=artifact_definition.parameters.value),
             target_id=member.id,
             target_name=member.display_label,
@@ -385,10 +437,16 @@
             context=context,
         )
 
-        await get_workflow().submit_workflow(
-            workflow=REQUEST_ARTIFACT_GENERATE, context=context, parameters={"model": request_artifact_generate_model}
+        batch.add(
+            task=get_workflow().submit_workflow,
+            workflow=REQUEST_ARTIFACT_GENERATE,
+            context=context,
+            parameters={"model": request_artifact_generate_model},
        )
 
+    async for _, _ in batch.execute():
+        pass
+
 
 @flow(name="git-repository-pull-read-only", flow_run_name="Pull latest commit on {model.repository_name}")
 async def pull_read_only(model: GitRepositoryPullReadOnly) -> None:
@@ -569,9 +627,7 @@ async def trigger_repository_user_checks_definitions(model: UserCheckDefinitionData
 
     if definition.targets.id:
         # Check against a group of targets
-        await definition.targets.fetch()
-        group = definition.targets.peer
-        await group.members.fetch()
+        group = await fetch_check_definition_targets(client=client, branch=model.branch_name, definition=definition)
         check_models = []
         for relationship in group.members.peers:
             member = relationship.peer
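Two behavioural notes on the tasks.py hunks above: repository sync failures are now isolated in their own sub-flow, which tags the flow run with the repository and branch before re-raising, and generate_request_artifact_definition now queues one submit_workflow call per group member into an SDK batch and drains it at the end instead of awaiting each submission in turn. A rough standalone sketch of that second "queue first, then run concurrently" shape using plain asyncio rather than the SDK batch object; submit_workflow below is a stand-in, not the real workflow client:

# Illustration only: sequential awaits replaced by a queued batch executed concurrently.
import asyncio


async def submit_workflow(name: str, member: str) -> str:
    await asyncio.sleep(0.05)  # stand-in for a round-trip to the task manager
    return f"{name}:{member}"


async def main() -> None:
    members = ["spine1", "leaf1", "leaf2"]

    # Before: one awaited submission per member, i.e. one round-trip at a time.
    # After: build the whole batch first, then let the submissions run concurrently.
    pending = [submit_workflow(name="request-artifact-generate", member=member) for member in members]
    for result in await asyncio.gather(*pending):
        print(result)


asyncio.run(main())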
infrahub/git/utils.py CHANGED
@@ -1,9 +1,16 @@
-from typing import TYPE_CHECKING
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any
+
+from infrahub_sdk import InfrahubClient
+from infrahub_sdk.node import RelationshipManager
+from infrahub_sdk.protocols import CoreArtifactDefinition, CoreCheckDefinition, CoreGroup
+from infrahub_sdk.types import Order
 
 from infrahub.core import registry
 from infrahub.core.constants import InfrahubKind
 from infrahub.core.manager import NodeManager
 from infrahub.database import InfrahubDatabase
+from infrahub.generators.models import ProposedChangeGeneratorDefinition
 
 from .models import RepositoryBranchInfo, RepositoryData
 
@@ -46,3 +53,118 @@ async def get_repositories_commit_per_branch(
     )
 
     return repositories
+
+
+def _collect_parameter_first_segments(params: Any) -> set[str]:
+    segments: set[str] = set()
+
+    def _walk(value: Any) -> None:
+        if isinstance(value, str):
+            segment = value.split("__", 1)[0]
+            if segment:
+                segments.add(segment)
+        elif isinstance(value, dict):
+            for nested in value.values():
+                _walk(nested)
+        elif isinstance(value, (list, tuple, set)):
+            for nested in value:
+                _walk(nested)
+
+    _walk(params)
+    return segments
+
+
+async def _prefetch_group_member_nodes(
+    client: InfrahubClient,
+    members: RelationshipManager,
+    branch: str,
+    required_fields: set[str],
+) -> None:
+    ids_per_kind: dict[str, set[str]] = defaultdict(set)
+    for peer in members.peers:
+        if peer.id and peer.typename:
+            ids_per_kind[peer.typename].add(peer.id)
+
+    if not ids_per_kind:
+        return
+
+    batch = await client.create_batch()
+
+    for kind, ids in ids_per_kind.items():
+        schema = await client.schema.get(kind=kind, branch=branch)
+
+        # FIXME: https://github.com/opsmill/infrahub-sdk-python/pull/205
+        valid_fields = set(schema.attribute_names) | set(schema.relationship_names)
+        keep_relationships = set(schema.relationship_names) & required_fields
+        cleaned_fields = valid_fields - required_fields
+
+        kwargs: dict[str, Any] = {
+            "kind": kind,
+            "ids": list(ids),
+            "branch": branch,
+            "exclude": list(cleaned_fields),
+            "populate_store": True,
+            "order": Order(disable=True),
+        }
+
+        if keep_relationships:
+            kwargs["include"] = list(keep_relationships)
+
+        batch.add(task=client.filters, **kwargs)
+
+    async for _ in batch.execute():
+        pass
+
+
+async def _fetch_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    group_id: str,
+    parameters: Any,
+) -> CoreGroup:
+    group = await client.get(
+        kind=CoreGroup,
+        id=group_id,
+        branch=branch,
+        include=["members"],
+    )
+
+    parameter_fields = _collect_parameter_first_segments(parameters)
+    await _prefetch_group_member_nodes(
+        client=client,
+        members=group.members,
+        branch=branch,
+        required_fields=parameter_fields,
+    )
+
+    return group
+
+
+async def fetch_artifact_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: CoreArtifactDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.targets.id, parameters=definition.parameters.value
+    )
+
+
+async def fetch_check_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: CoreCheckDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.targets.id, parameters=definition.parameters.value
+    )
+
+
+async def fetch_proposed_change_generator_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: ProposedChangeGeneratorDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.group_id, parameters=definition.parameters
+    )
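To make the prefetch logic above easier to follow: _collect_parameter_first_segments walks the definition's parameters and keeps only the first "__"-separated segment of each string value; those segments are the attribute and relationship names that must not be excluded when the group members are bulk-loaded with client.filters. The helper is self-contained, so it can be exercised directly; the parameter values below are illustrative:

from typing import Any


def _collect_parameter_first_segments(params: Any) -> set[str]:
    # Copied from the diff above: gather the first "__" segment of every string in a nested structure.
    segments: set[str] = set()

    def _walk(value: Any) -> None:
        if isinstance(value, str):
            segment = value.split("__", 1)[0]
            if segment:
                segments.add(segment)
        elif isinstance(value, dict):
            for nested in value.values():
                _walk(nested)
        elif isinstance(value, (list, tuple, set)):
            for nested in value:
                _walk(nested)

    _walk(params)
    return segments


print(_collect_parameter_first_segments({"device_name": "name__value", "interfaces": ["interfaces__name__value"]}))
# {'name', 'interfaces'} (set order varies): "name" is an attribute, "interfaces" a relationship to keep.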
infrahub/graphql/api/endpoints.py CHANGED
@@ -2,14 +2,15 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING
 
-from fastapi import APIRouter, Depends
+from fastapi import APIRouter, Depends, Query
 from fastapi.responses import PlainTextResponse
-from graphql import print_schema
+from graphql import parse, print_ast, print_schema
 from starlette.routing import Route, WebSocketRoute
 
 from infrahub.api.dependencies import get_branch_dep, get_current_user
 from infrahub.core import registry
 from infrahub.graphql.registry import registry as graphql_registry
+from infrahub.graphql.schema_sort import sort_schema_ast
 
 from .dependencies import build_graphql_app
 
@@ -27,11 +28,20 @@ router.routes.append(WebSocketRoute(path="/graphql", endpoint=graphql_app))
 router.routes.append(WebSocketRoute(path="/graphql/{branch_name:str}", endpoint=graphql_app))
 
 
-@router.get("/schema.graphql", include_in_schema=False)
+@router.get("/schema.graphql")
 async def get_graphql_schema(
-    branch: Branch = Depends(get_branch_dep), _: AccountSession = Depends(get_current_user)
+    branch: Branch = Depends(get_branch_dep),
+    _: AccountSession = Depends(get_current_user),
+    sort_schema: bool = Query(default=False, alias="sorted", description="Whether to sort the schema alphabetically."),
 ) -> PlainTextResponse:
     schema_branch = registry.schema.get_schema_branch(name=branch.name)
     gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
     graphql_schema = gqlm.get_graphql_schema()
+
+    if sort_schema:
+        schema_str = print_schema(graphql_schema)
+        schema_ast = parse(schema_str)
+        sorted_schema_ast = sort_schema_ast(schema_ast)
+        return PlainTextResponse(content=print_ast(sorted_schema_ast))
+
     return PlainTextResponse(content=print_schema(graphql_schema))
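The endpoint change above removes include_in_schema=False (so the route now appears in the OpenAPI docs) and adds a "sorted" query parameter that returns the SDL with its definitions sorted alphabetically via the new sort_schema_ast helper. A hedged example of calling it; the host, port, path prefix, and authentication header are assumptions, only the "sorted" parameter comes from the diff:

# Assumes a locally running Infrahub instance and an API token; adjust URL and auth to your setup.
import httpx

response = httpx.get(
    "http://localhost:8000/graphql/schema.graphql",  # mount path is an assumption
    params={"sorted": "true"},
    headers={"X-INFRAHUB-KEY": "<api-token>"},  # header name is an assumption based on the SDK's token auth
)
response.raise_for_status()
print(response.text[:500])  # alphabetically sorted GraphQL SDL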
infrahub/graphql/manager.py CHANGED
@@ -780,10 +780,7 @@ class GraphQLSchemaManager:
             attr_kind = get_attr_kind(schema, attr)
             attr_type = get_attribute_type(kind=attr_kind).get_graphql_update()
 
-            # A Field is not required if explicitly indicated or if a default value has been provided
-            required = not attr.optional if not attr.default_value else False
-
-            attrs[attr.name] = graphene.InputField(attr_type, required=required, description=attr.description)
+            attrs[attr.name] = graphene.InputField(attr_type, description=attr.description)
 
         for rel in schema.relationships:
             if rel.internal_peer or rel.read_only:
@@ -791,14 +788,12 @@
 
             input_type = self._get_related_input_type(relationship=rel)
 
-            required = not rel.optional
             if rel.cardinality == RelationshipCardinality.ONE:
-                attrs[rel.name] = graphene.InputField(input_type, required=required, description=rel.description)
+                attrs[rel.name] = graphene.InputField(input_type, description=rel.description)
 
             elif rel.cardinality == RelationshipCardinality.MANY:
-                attrs[rel.name] = graphene.InputField(
-                    graphene.List(input_type), required=required, description=rel.description
-                )
+                attrs[rel.name] = graphene.InputField(graphene.List(input_type), description=rel.description)
+
         input_name = f"{schema.kind}UpsertInput"
         md5hash = hashlib.md5(usedforsecurity=False)
         md5hash.update(f"{input_name}{schema.get_hash()}".encode())
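The manager.py change above drops the required flag from every attribute and relationship field of the generated *UpsertInput types, so these inputs are no longer enforced as non-null at the GraphQL schema level and presence checks shift to server-side validation. A small standalone graphene sketch of what that flag changes in the printed SDL; the type below is made up and unrelated to Infrahub's generated types:

# Sketch: without required=True an InputField renders as nullable (name: String) instead of non-null (site: String!).
import graphene


class DeviceUpsertInput(graphene.InputObjectType):
    name = graphene.InputField(graphene.String, description="optional at the schema level")
    site = graphene.InputField(graphene.String, required=True, description="non-null in the SDL")


class Query(graphene.ObjectType):
    ok = graphene.Boolean(data=graphene.Argument(DeviceUpsertInput))

    @staticmethod
    def resolve_ok(root, info, data=None):
        return True


print(graphene.Schema(query=Query))  # SDL shows "name: String" next to "site: String!"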
infrahub/graphql/mutations/convert_object_type.py CHANGED
@@ -10,9 +10,11 @@ from infrahub.core.convert_object_type.object_conversion import ConversionFieldInput
 from infrahub.core.convert_object_type.repository_conversion import convert_repository_type
 from infrahub.core.convert_object_type.schema_mapping import get_schema_mapping
 from infrahub.core.manager import NodeManager
+from infrahub.exceptions import ValidationError
 from infrahub.repositories.create_repository import RepositoryFinalizer
 
 if TYPE_CHECKING:
+    from infrahub.core.attribute import BaseAttribute
     from infrahub.graphql.initialization import GraphqlContext
 
 
@@ -49,7 +51,9 @@ class ConvertObjectType(Mutation):
 
         fields_mapping: dict[str, ConversionFieldInput] = {}
         if not isinstance(data.fields_mapping, dict):
-            raise ValueError(f"Expected `fields_mapping` to be a `dict`, got {type(data.fields_mapping)}")
+            raise ValidationError(
+                input_value=f"Expected `fields_mapping` to be a `dict`, got {type(data.fields_mapping)}"
+            )
 
         for field_name, input_for_dest_field_str in data.fields_mapping.items():
             fields_mapping[field_name] = ConversionFieldInput(**input_for_dest_field_str)
@@ -57,6 +61,12 @@
         node_to_convert = await NodeManager.get_one(
             id=str(data.node_id), db=graphql_context.db, branch=graphql_context.branch
         )
+        for attribute_name in source_schema.attribute_names:
+            attribute: BaseAttribute = getattr(node_to_convert, attribute_name)
+            if attribute.is_from_profile:
+                raise ValidationError(
+                    input_value=f"The attribute '{attribute_name}' is from a profile, converting objects that use profiles is not yet supported."
+                )
 
         # Complete fields mapping with auto-mapping.
         mapping = get_schema_mapping(source_schema=source_schema, target_schema=target_schema)
infrahub/graphql/mutations/display_label.py CHANGED
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Any
 from graphene import Boolean, InputObjectType, Mutation, String
 
 from infrahub.core.account import ObjectPermission
-from infrahub.core.constants import PermissionAction, PermissionDecision
+from infrahub.core.constants import GlobalPermissions, PermissionAction, PermissionDecision
 from infrahub.core.manager import NodeManager
 from infrahub.core.registry import registry
 from infrahub.database import retry_db_transaction
@@ -15,6 +15,7 @@ from infrahub.exceptions import NodeNotFoundError, ValidationError
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.types.context import ContextInput
 from infrahub.log import get_log_data
+from infrahub.permissions import define_global_permission_from_branch
 from infrahub.worker import WORKER_IDENTITY
 
 if TYPE_CHECKING:
@@ -52,15 +53,21 @@ class UpdateDisplayLabel(Mutation):
         if not node_schema.display_label:
             raise ValidationError(input_value=f"{node_schema.kind}.display_label has not been defined for this kind.")
 
-        graphql_context.active_permissions.raise_for_permission(
-            permission=ObjectPermission(
-                namespace=node_schema.namespace,
-                name=node_schema.name,
-                action=PermissionAction.UPDATE.value,
-                decision=PermissionDecision.ALLOW_DEFAULT.value
-                if graphql_context.branch.name == registry.default_branch
-                else PermissionDecision.ALLOW_OTHER.value,
-            )
+        graphql_context.active_permissions.raise_for_permissions(
+            permissions=[
+                define_global_permission_from_branch(
+                    permission=GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL,
+                    branch_name=graphql_context.branch.name,
+                ),
+                ObjectPermission(
+                    namespace=node_schema.namespace,
+                    name=node_schema.name,
+                    action=PermissionAction.UPDATE.value,
+                    decision=PermissionDecision.ALLOW_DEFAULT.value
+                    if graphql_context.branch.name == registry.default_branch
+                    else PermissionDecision.ALLOW_OTHER.value,
+                ),
+            ]
         )
         await apply_external_context(graphql_context=graphql_context, context_input=context)
 
infrahub/graphql/mutations/hfid.py CHANGED
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Any, cast
 from graphene import Boolean, InputObjectType, List, Mutation, NonNull, String
 
 from infrahub.core.account import ObjectPermission
-from infrahub.core.constants import PermissionAction, PermissionDecision
+from infrahub.core.constants import GlobalPermissions, PermissionAction, PermissionDecision
 from infrahub.core.manager import NodeManager
 from infrahub.core.registry import registry
 from infrahub.database import retry_db_transaction
@@ -15,6 +15,7 @@ from infrahub.exceptions import NodeNotFoundError, ValidationError
 from infrahub.graphql.context import apply_external_context
 from infrahub.graphql.types.context import ContextInput
 from infrahub.log import get_log_data
+from infrahub.permissions import define_global_permission_from_branch
 from infrahub.worker import WORKER_IDENTITY
 
 if TYPE_CHECKING:
@@ -61,15 +62,21 @@ class UpdateHFID(Mutation):
                 input_value=f"{node_schema.kind}.human_friendly_id requires {len(node_schema.human_friendly_id)} parts data has {len(updated_hfid)}"
             )
 
-        graphql_context.active_permissions.raise_for_permission(
-            permission=ObjectPermission(
-                namespace=node_schema.namespace,
-                name=node_schema.name,
-                action=PermissionAction.UPDATE.value,
-                decision=PermissionDecision.ALLOW_DEFAULT.value
-                if graphql_context.branch.name == registry.default_branch
-                else PermissionDecision.ALLOW_OTHER.value,
-            )
+        graphql_context.active_permissions.raise_for_permissions(
+            permissions=[
+                define_global_permission_from_branch(
+                    permission=GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL,
+                    branch_name=graphql_context.branch.name,
+                ),
+                ObjectPermission(
+                    namespace=node_schema.namespace,
+                    name=node_schema.name,
+                    action=PermissionAction.UPDATE.value,
+                    decision=PermissionDecision.ALLOW_DEFAULT.value
+                    if graphql_context.branch.name == registry.default_branch
+                    else PermissionDecision.ALLOW_OTHER.value,
+                ),
+            ]
        )
        await apply_external_context(graphql_context=graphql_context, context_input=context)
 
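Both UpdateDisplayLabel and UpdateHFID above now pass two permissions to raise_for_permissions: the new global update_object_hfid_display_label permission (resolved for the active branch via define_global_permission_from_branch) plus the existing object-level update permission. The implementation of raise_for_permissions is not part of this diff; presumably every permission in the list must be granted. A standalone sketch of that "all must pass" semantics; the checker class and permission strings below are illustrative, not Infrahub's implementation:

# Illustrative only: a combined permission check where every entry must be granted.
from dataclasses import dataclass


class PermissionDeniedError(Exception):
    pass


@dataclass(frozen=True)
class Permission:
    name: str


class ActivePermissions:
    def __init__(self, granted: set[str]) -> None:
        self._granted = granted

    def raise_for_permissions(self, permissions: list[Permission]) -> None:
        missing = [perm.name for perm in permissions if perm.name not in self._granted]
        if missing:
            raise PermissionDeniedError(f"Missing permissions: {', '.join(missing)}")


checker = ActivePermissions(granted={"global:update_object_hfid_display_label"})
try:
    checker.raise_for_permissions(
        permissions=[
            Permission(name="global:update_object_hfid_display_label"),
            Permission(name="object:builtin:tag:update"),  # not granted, so the whole check fails
        ]
    )
except PermissionDeniedError as exc:
    print(exc)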