infrahub-server 1.6.0__py3-none-any.whl → 1.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/oauth2.py +33 -6
- infrahub/api/oidc.py +36 -6
- infrahub/auth.py +11 -0
- infrahub/auth_pkce.py +41 -0
- infrahub/config.py +8 -2
- infrahub/core/branch/models.py +3 -2
- infrahub/core/changelog/models.py +2 -2
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/integrity/object_conflict/conflict_recorder.py +1 -1
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m047_backfill_or_null_display_label.py +606 -0
- infrahub/core/node/__init__.py +5 -8
- infrahub/core/node/proposed_change.py +5 -3
- infrahub/core/relationship/model.py +9 -3
- infrahub/core/schema/manager.py +8 -3
- infrahub/core/validators/attribute/choices.py +2 -2
- infrahub/git/models.py +13 -0
- infrahub/git/tasks.py +23 -19
- infrahub/git/utils.py +16 -9
- infrahub/graphql/app.py +6 -6
- infrahub/graphql/mutations/action.py +15 -7
- infrahub/graphql/mutations/hfid.py +1 -1
- infrahub/graphql/mutations/repository.py +3 -3
- infrahub/graphql/mutations/schema.py +4 -4
- infrahub/graphql/mutations/webhook.py +2 -2
- infrahub/proposed_change/branch_diff.py +1 -1
- infrahub/repositories/create_repository.py +3 -3
- infrahub/task_manager/models.py +1 -1
- infrahub/task_manager/task.py +3 -3
- infrahub/validators/tasks.py +1 -1
- infrahub_sdk/ctl/AGENTS.md +67 -0
- infrahub_sdk/ctl/repository.py +4 -46
- infrahub_sdk/node/constants.py +2 -0
- infrahub_sdk/node/node.py +303 -3
- infrahub_sdk/pytest_plugin/AGENTS.md +67 -0
- infrahub_sdk/timestamp.py +7 -7
- {infrahub_server-1.6.0.dist-info → infrahub_server-1.6.1.dist-info}/METADATA +2 -3
- {infrahub_server-1.6.0.dist-info → infrahub_server-1.6.1.dist-info}/RECORD +41 -37
- {infrahub_server-1.6.0.dist-info → infrahub_server-1.6.1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.6.0.dist-info → infrahub_server-1.6.1.dist-info}/entry_points.txt +0 -0
- {infrahub_server-1.6.0.dist-info → infrahub_server-1.6.1.dist-info}/licenses/LICENSE.txt +0 -0
infrahub/core/relationship/model.py  CHANGED

@@ -1072,7 +1072,12 @@ class RelationshipManager:

         return self._relationships.as_list()

-    async def update(
+    async def update(
+        self,
+        data: list[str | Node] | dict[str, Any] | str | Node | None,
+        db: InfrahubDatabase,
+        process_delete: bool = True,
+    ) -> bool:
         """Replace and Update the list of relationships with this one."""
         if not isinstance(data, list):
             list_data: Sequence[str | Node | dict[str, Any] | None] = [data]

@@ -1098,8 +1103,9 @@ class RelationshipManager:

             if item is None:
                 if previous_relationships:
-
-
+                    if process_delete:
+                        for rel in previous_relationships.values():
+                            await rel.delete(db=db)
                     changed = True
                 continue

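`RelationshipManager.update()` gains a `process_delete` flag: when a relationship is cleared (an item of `None` while previous peers exist), the deletion of those previous relationships now only runs if `process_delete` is true. A hedged usage sketch; `replace_tags` and the `tags` relationship are illustrative names, not from the diff:

```python
from infrahub.core.node import Node
from infrahub.database import InfrahubDatabase


async def replace_tags(node: Node, db: InfrahubDatabase) -> None:
    # Hypothetical helper: `node.tags` stands for any RelationshipManager.
    # Default behaviour when clearing: the previous peer relationships are deleted.
    await node.tags.update(data=None, db=db)

    # With process_delete=False the previous peer relationships are left in the
    # database; the update is applied without the delete step.
    await node.tags.update(data=None, db=db, process_delete=False)
```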
infrahub/core/schema/manager.py  CHANGED

@@ -774,10 +774,15 @@ class SchemaManager(NodeManager):
         """Return non active branches that were purged."""

         hashes_to_keep: set[str] = set()
+        branch_processed: set[str] = set()
         for active_branch in active_branches:
-
-
-
+            branch_hash = self._branch_hash_by_name.get(active_branch)
+            if not branch_hash or branch_hash not in branch_processed:
+                if branch_hash:
+                    branch_processed.add(branch_hash)
+                if branch := self._branches.get(active_branch):
+                    nodes = branch.get_all(include_internal=True, duplicate=False)
+                    hashes_to_keep.update([node.get_hash() for node in nodes.values()])

         removed_branches: list[str] = []
         for branch_name in list(self._branches.keys()):
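The new loop keys the hash collection on each branch's schema hash, so branches that share an identical schema are only walked once. A standalone sketch of that dedup pattern with made-up data (plain Python, not the Infrahub API):

```python
# Hypothetical data: two branches share the same schema hash.
branch_hash_by_name = {"main": "abc123", "feature-1": "abc123", "feature-2": "def456"}
node_hashes_by_branch = {
    "main": {"h1", "h2"},
    "feature-1": {"h1", "h2"},  # identical schema -> identical node hashes
    "feature-2": {"h3"},
}

hashes_to_keep: set[str] = set()
processed_hashes: set[str] = set()
for name, schema_hash in branch_hash_by_name.items():
    if schema_hash in processed_hashes:
        continue  # hashes for an identical schema were already collected
    processed_hashes.add(schema_hash)
    hashes_to_keep.update(node_hashes_by_branch[name])

print(sorted(hashes_to_keep))  # ['h1', 'h2', 'h3']
```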
infrahub/core/validators/attribute/choices.py  CHANGED

@@ -4,13 +4,13 @@ from typing import TYPE_CHECKING, Any, cast

 from infrahub.core.constants import NULL_VALUE, PathType
 from infrahub.core.path import DataPath, GroupedDataPaths
-from infrahub.core.schema.generic_schema import GenericSchema

 from ..interface import ConstraintCheckerInterface
 from ..shared import AttributeSchemaValidatorQuery

 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
+    from infrahub.core.schema.generic_schema import GenericSchema
     from infrahub.database import InfrahubDatabase

     from ..model import SchemaConstraintValidatorRequest

@@ -106,7 +106,7 @@ class AttributeChoicesChecker(ConstraintCheckerInterface):
         # skip inheriting schemas that override the attribute being checked
         excluded_kinds: list[str] = []
         if request.node_schema.is_generic_schema:
-            request.node_schema = cast(GenericSchema, request.node_schema)
+            request.node_schema = cast("GenericSchema", request.node_schema)
             for inheriting_kind in request.node_schema.used_by:
                 inheriting_schema = request.schema_branch.get_node(name=inheriting_kind, duplicate=False)
                 inheriting_schema_attribute = inheriting_schema.get_attribute(name=request.schema_path.field_name)
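Many of the edits in this release follow the pattern shown here: a type moves under `if TYPE_CHECKING:` and the matching `typing.cast()` call quotes the type name. `cast()` ignores its type argument at runtime, so passing it as a string means the name only has to exist for the type checker. A small self-contained illustration; the types are illustrative, not Infrahub's:

```python
from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Visible to the type checker only; never imported at runtime.
    from decimal import Decimal


def as_decimal(value: object) -> "Decimal":
    # cast() returns `value` unchanged at runtime; the quoted type name means
    # the runtime never needs the Decimal import.
    return cast("Decimal", value)
```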
infrahub/git/models.py  CHANGED

@@ -1,6 +1,8 @@
 from pydantic import BaseModel, ConfigDict, Field

 from infrahub.context import InfrahubContext
+from infrahub.core.node import Node
+from infrahub.core.protocols import CoreReadOnlyRepository, CoreRepository
 from infrahub.message_bus.types import ProposedChangeBranchDiff


@@ -201,11 +203,22 @@ class RepositoryBranchInfo(BaseModel):


 class RepositoryData(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+
     repository_id: str = Field(..., description="Id of the repository")
     repository_name: str = Field(..., description="Name of the repository")
+    repository: CoreRepository | CoreReadOnlyRepository | Node = Field(
+        ..., description="InfrahubNode representing a Repository"
+    )
     branches: dict[str, str] = Field(
         ...,
         description="Dictionary with the name of the branch as the key and the active commit id as the value",
     )

     branch_info: dict[str, RepositoryBranchInfo] = Field(default_factory=dict)
+
+    def get_staging_branch(self) -> str | None:
+        for branch, info in self.branch_info.items():
+            if info.internal_status == "staging":
+                return branch
+        return None
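`RepositoryData` now carries the repository node itself (hence `arbitrary_types_allowed`) and exposes `get_staging_branch()`, which returns the first branch whose `internal_status` is `"staging"`. A hedged sketch of populating and querying the model; `repository_node` and all field values are made up for illustration:

```python
from infrahub.git.models import RepositoryBranchInfo, RepositoryData

# `repository_node` stands for a CoreRepository / CoreReadOnlyRepository node
# loaded elsewhere; the ids and commits below are placeholders.
repo_data = RepositoryData(
    repository_id="0001-demo",
    repository_name="demo-repo",
    repository=repository_node,
    branches={"main": "a1b2c3", "feature": "d4e5f6"},
    branch_info={
        "main": RepositoryBranchInfo(internal_status="active"),
        "feature": RepositoryBranchInfo(internal_status="staging"),
    },
)

assert repo_data.get_staging_branch() == "feature"
```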
infrahub/git/tasks.py  CHANGED

@@ -60,7 +60,7 @@ from .models import (
     UserCheckDefinitionData,
 )
 from .repository import InfrahubReadOnlyRepository, InfrahubRepository, get_initialized_repo
-from .utils import fetch_artifact_definition_targets, fetch_check_definition_targets
+from .utils import fetch_artifact_definition_targets, fetch_check_definition_targets, get_repositories_commit_per_branch


 @flow(

@@ -195,13 +195,17 @@ async def sync_git_repo_with_origin_and_tag_on_failure(
 @flow(name="git_repositories_sync", flow_run_name="Sync Git Repositories")
 async def sync_remote_repositories() -> None:
     log = get_run_logger()
+    db = await get_database()

     client = get_client()

     branches = await client.branch.all()
-
+    async with db.start_session() as dbs:
+        repositories = await get_repositories_commit_per_branch(db=dbs, kind=InfrahubKind.REPOSITORY)

     for repo_name, repository_data in repositories.items():
+        repository: CoreRepository = repository_data.repository
+
         active_internal_status = RepositoryInternalStatus.ACTIVE.value
         default_internal_status = repository_data.branch_info[registry.default_branch].internal_status
         staging_branch = None

@@ -215,12 +219,12 @@ async def sync_remote_repositories() -> None:
         init_failed = False
         try:
             repo = await InfrahubRepository.init(
-                id=
-                name=
-                location=
+                id=repository.id,
+                name=repository.name.value,
+                location=repository.location.value,
                 client=client,
                 internal_status=active_internal_status,
-                default_branch_name=
+                default_branch_name=repository.default_branch.value,
             )
         except RepositoryError as exc:
             get_logger().error(str(exc))

@@ -229,12 +233,12 @@ async def sync_remote_repositories() -> None:
         if init_failed:
             try:
                 repo = await InfrahubRepository.new(
-                    id=
-                    name=
-                    location=
+                    id=repository.id,
+                    name=repository.name.value,
+                    location=repository.location.value,
                     client=client,
                     internal_status=active_internal_status,
-                    default_branch_name=
+                    default_branch_name=repository.default_branch.value,
                 )
                 await repo.import_objects_from_files(  # type: ignore[call-overload]
                     git_branch_name=registry.default_branch, infrahub_branch_name=infrahub_branch

@@ -246,22 +250,22 @@ async def sync_remote_repositories() -> None:
         try:
             await sync_git_repo_with_origin_and_tag_on_failure(
                 client=client,
-                repository_id=
-                repository_name=
-                repository_location=
+                repository_id=repository.id,
+                repository_name=repository.name.value,
+                repository_location=repository.location.value,
                 internal_status=active_internal_status,
-                default_branch_name=
-                operational_status=
+                default_branch_name=repository.default_branch.value,
+                operational_status=repository.operational_status.value,
                 staging_branch=staging_branch,
                 infrahub_branch=infrahub_branch,
             )
             # Tell workers to fetch to stay in sync
             message = messages.RefreshGitFetch(
                 meta=Meta(initiator_id=WORKER_IDENTITY, request_id=get_log_data().get("request_id", "")),
-                location=
-                repository_id=
-                repository_name=
-                repository_kind=
+                location=repository.location.value,
+                repository_id=repository.id,
+                repository_name=repository.name.value,
+                repository_kind=repository.get_kind(),
                 infrahub_branch_name=infrahub_branch,
                 infrahub_branch_id=branches[infrahub_branch].id,
             )
infrahub/git/utils.py  CHANGED

@@ -1,10 +1,16 @@
 import re
 from collections import defaultdict
-from typing import
+from typing import Any

 from infrahub_sdk import InfrahubClient
 from infrahub_sdk.node import RelationshipManager
-from infrahub_sdk.protocols import
+from infrahub_sdk.protocols import (
+    CoreArtifactDefinition,
+    CoreCheckDefinition,
+    CoreGroup,
+    CoreReadOnlyRepository,
+    CoreRepository,
+)
 from infrahub_sdk.types import Order

 from infrahub.core import registry

@@ -12,16 +18,15 @@ from infrahub.core.constants import InfrahubKind
 from infrahub.core.manager import NodeManager
 from infrahub.database import InfrahubDatabase
 from infrahub.generators.models import ProposedChangeGeneratorDefinition
+from infrahub.graphql.models import OrderModel

 from .. import config
 from .models import RepositoryBranchInfo, RepositoryData

-if TYPE_CHECKING:
-    from infrahub.core.protocols import CoreGenericRepository
-

 async def get_repositories_commit_per_branch(
     db: InfrahubDatabase,
+    kind: str = InfrahubKind.GENERICREPOSITORY,
 ) -> dict[str, RepositoryData]:
     """Get a list of all repositories and their commit on each branches.

@@ -33,11 +38,12 @@ async def get_repositories_commit_per_branch(
     repositories: dict[str, RepositoryData] = {}

     for branch in list(registry.branch.values()):
-        repos: list[
+        repos: list[CoreRepository | CoreReadOnlyRepository] = await NodeManager.query(
             db=db,
             branch=branch,
-            fields={"id": None, "name": None, "commit": None, "internal_status": None},
-            schema=
+            fields={"id": None, "name": None, "commit": None, "internal_status": None, "location": None, "ref": None},
+            schema=kind,
+            order=OrderModel(disable=True),
         )

         for repository in repos:

@@ -46,10 +52,11 @@ async def get_repositories_commit_per_branch(
             repositories[repo_name] = RepositoryData(
                 repository_id=repository.get_id(),
                 repository_name=repo_name,
+                repository=repository,
                 branches={},
             )

-        repositories[repo_name].branches[branch.name] = repository.commit.value
+        repositories[repo_name].branches[branch.name] = repository.commit.value
         repositories[repo_name].branch_info[branch.name] = RepositoryBranchInfo(
             internal_status=repository.internal_status.value
         )
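`get_repositories_commit_per_branch()` now accepts a `kind` (defaulting to `InfrahubKind.GENERICREPOSITORY`), queries the extra `location` and `ref` fields with ordering disabled, and stores the loaded node on each `RepositoryData`. Based on the git/tasks.py hunks above, a hedged sketch of how a caller consumes it; the helper name is hypothetical and the Prefect flow machinery is omitted:

```python
from infrahub.core.constants import InfrahubKind
from infrahub.database import InfrahubDatabase
from infrahub.git.utils import get_repositories_commit_per_branch


async def list_repositories(db: InfrahubDatabase) -> None:
    # Mirrors the call pattern from sync_remote_repositories in git/tasks.py.
    async with db.start_session() as dbs:
        repositories = await get_repositories_commit_per_branch(db=dbs, kind=InfrahubKind.REPOSITORY)

    for repo_name, repository_data in repositories.items():
        repository = repository_data.repository  # node loaded by NodeManager.query
        print(repo_name, repository.location.value, repository_data.branches)
```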
infrahub/graphql/app.py  CHANGED

@@ -172,9 +172,9 @@ class InfrahubGraphQLApp:

         response = handler(request)
         if isawaitable(response):
-            return await cast(Awaitable[Response], response)
+            return await cast("Awaitable[Response]", response)

-        return cast(Response, response)
+        return cast("Response", response)

     async def _handle_http_request(
         self, request: Request, db: InfrahubDatabase, branch: Branch, account_session: AccountSession

@@ -350,8 +350,8 @@ class InfrahubGraphQLApp:
         websocket: WebSocket,
         subscriptions: dict[str, AsyncGenerator[Any, None]],
     ) -> None:
-        operation_id = cast(str, message.get("id"))
-        message_type = cast(str, message.get("type"))
+        operation_id = cast("str", message.get("id"))
+        message_type = cast("str", message.get("type"))

         if message_type == GQL_CONNECTION_INIT:
             websocket.scope["connection_params"] = message.get("payload")

@@ -445,7 +445,7 @@ class InfrahubGraphQLApp:
         if isinstance(result, ExecutionResult) and result.errors:
             return result.errors

-        asyncgen = cast(AsyncGenerator[Any, None], result)
+        asyncgen = cast("AsyncGenerator[Any, None]", result)
         subscriptions[operation_id] = asyncgen
         task = asyncio.create_task(self._observe_subscription(asyncgen, operation_id, websocket))
         subscription_tasks.add(task)

@@ -479,7 +479,7 @@ async def _get_operation_from_request(request: Request) -> dict[str, Any] | list
     content_type = request.headers.get("Content-Type", "").split(";")[0]
     if content_type == "application/json":
         try:
-            return cast(dict[str, Any] | list[Any], await request.json())
+            return cast("dict[str, Any] | list[Any]", await request.json())
         except (TypeError, ValueError) as err:
             raise ValueError("Request body is not a valid JSON") from err
     elif content_type == "multipart/form-data":
infrahub/graphql/mutations/action.py  CHANGED

@@ -5,7 +5,6 @@ from typing import TYPE_CHECKING, Any, cast
 from graphene import InputObjectType, Mutation
 from typing_extensions import Self

-from infrahub.core.protocols import CoreNodeTriggerAttributeMatch, CoreNodeTriggerRelationshipMatch, CoreNodeTriggerRule
 from infrahub.exceptions import SchemaNotFoundError, ValidationError
 from infrahub.log import get_logger


@@ -16,6 +15,11 @@ if TYPE_CHECKING:

     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
+    from infrahub.core.protocols import (
+        CoreNodeTriggerAttributeMatch,
+        CoreNodeTriggerRelationshipMatch,
+        CoreNodeTriggerRule,
+    )
     from infrahub.core.schema import NodeSchema
     from infrahub.database import InfrahubDatabase


@@ -104,9 +108,11 @@ class InfrahubTriggerRuleMatchMutation(InfrahubMutationMixin, Mutation):
             trigger_match, result = await super().mutate_create(
                 info=info, data=data, branch=branch, database=dbt, override_data=override_data
             )
-            trigger_match_model = cast(
+            trigger_match_model = cast(
+                "CoreNodeTriggerAttributeMatch | CoreNodeTriggerRelationshipMatch", trigger_match
+            )
             node_trigger_rule = await trigger_match_model.trigger.get_peer(db=dbt, raise_on_error=True)
-            node_trigger_rule_model = cast(CoreNodeTriggerRule, node_trigger_rule)
+            node_trigger_rule_model = cast("CoreNodeTriggerRule", node_trigger_rule)
             node_schema = dbt.schema.get_node_schema(name=node_trigger_rule_model.node_kind.value, duplicate=False)
             _validate_node_kind_field(data=data, node_schema=node_schema)


@@ -124,9 +130,11 @@ class InfrahubTriggerRuleMatchMutation(InfrahubMutationMixin, Mutation):
         graphql_context: GraphqlContext = info.context
         async with graphql_context.db.start_transaction() as dbt:
             trigger_match, result = await super().mutate_update(info=info, data=data, branch=branch, database=dbt)
-            trigger_match_model = cast(
+            trigger_match_model = cast(
+                "CoreNodeTriggerAttributeMatch | CoreNodeTriggerRelationshipMatch", trigger_match
+            )
             node_trigger_rule = await trigger_match_model.trigger.get_peer(db=dbt, raise_on_error=True)
-            node_trigger_rule_model = cast(CoreNodeTriggerRule, node_trigger_rule)
+            node_trigger_rule_model = cast("CoreNodeTriggerRule", node_trigger_rule)
             node_schema = dbt.schema.get_node_schema(name=node_trigger_rule_model.node_kind.value, duplicate=False)
             _validate_node_kind_field(data=data, node_schema=node_schema)


@@ -134,7 +142,7 @@ class InfrahubTriggerRuleMatchMutation(InfrahubMutationMixin, Mutation):


 def _validate_node_kind(data: InputObjectType, db: InfrahubDatabase) -> None:
-    input_data = cast(dict[str, dict[str, Any]], data)
+    input_data = cast("dict[str, dict[str, Any]]", data)
     if node_kind := input_data.get("node_kind"):
         value = node_kind.get("value")
         if isinstance(value, str):

@@ -149,7 +157,7 @@ def _validate_node_kind(data: InputObjectType, db: InfrahubDatabase) -> None:


 def _validate_node_kind_field(data: InputObjectType, node_schema: NodeSchema) -> None:
-    input_data = cast(dict[str, dict[str, Any]], data)
+    input_data = cast("dict[str, dict[str, Any]]", data)
     if attribute_name := input_data.get("attribute_name"):
         value = attribute_name.get("value")
         if isinstance(value, str):
infrahub/graphql/mutations/hfid.py  CHANGED

@@ -55,7 +55,7 @@ class UpdateHFID(Mutation):
                 input_value=f"{node_schema.kind}.human_friendly_id has not been defined for this kind."
             )

-        updated_hfid = cast(list[str], data.value)
+        updated_hfid = cast("list[str]", data.value)

         if len(node_schema.human_friendly_id) != len(updated_hfid):
             raise ValidationError(
infrahub/graphql/mutations/repository.py  CHANGED

@@ -9,7 +9,6 @@ from graphene import Boolean, Field, InputObjectType, Mutation, String
 from infrahub import config
 from infrahub.core.constants import InfrahubKind
 from infrahub.core.manager import NodeManager
-from infrahub.core.protocols import CoreReadOnlyRepository, CoreRepository
 from infrahub.core.schema import NodeSchema
 from infrahub.git.models import (
     GitRepositoryImportObjects,

@@ -34,6 +33,7 @@ if TYPE_CHECKING:

     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
+    from infrahub.core.protocols import CoreReadOnlyRepository, CoreRepository
     from infrahub.database import InfrahubDatabase
     from infrahub.graphql.initialization import GraphqlContext


@@ -107,7 +107,7 @@ class InfrahubRepositoryMutation(InfrahubMutationMixin, Mutation):
         if node.get_kind() != InfrahubKind.READONLYREPOSITORY:
             return await super().mutate_update(info, data, branch, database=graphql_context.db, node=node)

-        node = cast(CoreReadOnlyRepository, node)
+        node = cast("CoreReadOnlyRepository", node)
         current_commit = node.commit.value
         current_ref = node.ref.value
         new_commit = None

@@ -118,7 +118,7 @@ class InfrahubRepositoryMutation(InfrahubMutationMixin, Mutation):
             new_ref = data.ref.value

         obj, result = await super().mutate_update(info, data, branch, database=graphql_context.db, node=node)
-        obj = cast(CoreReadOnlyRepository, obj)
+        obj = cast("CoreReadOnlyRepository", obj)

         send_update_message = (new_commit and new_commit != current_commit) or (new_ref and new_ref != current_ref)
         if not send_update_message:
infrahub/graphql/mutations/schema.py  CHANGED

@@ -81,7 +81,7 @@ class SchemaDropdownAdd(Mutation):
         _validate_schema_permission(graphql_context=graphql_context)
         await apply_external_context(graphql_context=graphql_context, context_input=context)

-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=True)
         attribute = str(data.attribute)
         validate_kind_dropdown(kind=kind, attribute=attribute)
         dropdown = str(data.dropdown)

@@ -141,7 +141,7 @@ class SchemaDropdownRemove(Mutation):
         graphql_context: GraphqlContext = info.context

         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=True)
         await apply_external_context(graphql_context=graphql_context, context_input=context)

         attribute = str(data.attribute)

@@ -197,7 +197,7 @@ class SchemaEnumAdd(Mutation):
         graphql_context: GraphqlContext = info.context

         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=True)
         await apply_external_context(graphql_context=graphql_context, context_input=context)

         attribute = str(data.attribute)

@@ -243,7 +243,7 @@ class SchemaEnumRemove(Mutation):
         graphql_context: GraphqlContext = info.context

         _validate_schema_permission(graphql_context=graphql_context)
-        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=
+        kind = graphql_context.db.schema.get(name=str(data.kind), branch=graphql_context.branch.name, duplicate=True)
         await apply_external_context(graphql_context=graphql_context, context_input=context)

         attribute = str(data.attribute)
infrahub/graphql/mutations/webhook.py  CHANGED

@@ -6,7 +6,6 @@ from typing import TYPE_CHECKING, Any, Self, cast
 from graphene import InputObjectType, Mutation

 from infrahub.core.manager import NodeManager
-from infrahub.core.protocols import CoreWebhook
 from infrahub.core.schema import NodeSchema
 from infrahub.database import retry_db_transaction
 from infrahub.events.utils import get_all_infrahub_node_kind_events

@@ -20,6 +19,7 @@ if TYPE_CHECKING:

     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
+    from infrahub.core.protocols import CoreWebhook
     from infrahub.database import InfrahubDatabase
     from infrahub.graphql.initialization import GraphqlContext


@@ -107,7 +107,7 @@ class InfrahubWebhookMutation(InfrahubMutationMixin, Mutation):
             branch=branch,
         )

-        webhook = cast(CoreWebhook, obj)
+        webhook = cast("CoreWebhook", obj)

         event_type = input_data.event_type.value if input_data.event_type else webhook.event_type.value.value
         node_kind = input_data.node_kind.value if input_data.node_kind else webhook.node_kind.value
infrahub/proposed_change/branch_diff.py  CHANGED

@@ -64,4 +64,4 @@ async def get_diff_summary_cache(pipeline_id: UUID) -> list[NodeDiff]:
     if not summary_payload:
         raise ResourceNotFoundError(message=f"Diff summary for pipeline {pipeline_id} was not found in the cache")

-    return cast(list[
+    return cast("list[NodeDiff]", json.loads(summary_payload))
infrahub/repositories/create_repository.py  CHANGED

@@ -4,7 +4,6 @@ from typing import TYPE_CHECKING, cast

 from infrahub.core.constants import RepositoryInternalStatus
 from infrahub.core.constants.infrahubkind import READONLYREPOSITORY, REPOSITORY
-from infrahub.core.protocols import CoreGenericRepository, CoreReadOnlyRepository, CoreRepository
 from infrahub.exceptions import ValidationError
 from infrahub.git.models import GitRepositoryAdd, GitRepositoryAddReadOnly
 from infrahub.log import get_logger

@@ -16,6 +15,7 @@ if TYPE_CHECKING:
     from infrahub.auth import AccountSession
     from infrahub.context import InfrahubContext
     from infrahub.core.branch import Branch
+    from infrahub.core.protocols import CoreGenericRepository, CoreReadOnlyRepository, CoreRepository
     from infrahub.database import InfrahubDatabase
     from infrahub.services import InfrahubServices


@@ -74,7 +74,7 @@ class RepositoryFinalizer:
         authenticated_user = self.account_session.account_id

         if obj.get_kind() == READONLYREPOSITORY:
-            obj = cast(CoreReadOnlyRepository, obj)
+            obj = cast("CoreReadOnlyRepository", obj)
             model = GitRepositoryAddReadOnly(
                 repository_id=obj.id,
                 repository_name=obj.name.value,

@@ -92,7 +92,7 @@ class RepositoryFinalizer:
             )

         elif obj.get_kind() == REPOSITORY:
-            obj = cast(CoreRepository, obj)
+            obj = cast("CoreRepository", obj)
             git_repo_add_model = GitRepositoryAdd(
                 repository_id=obj.id,
                 repository_name=obj.name.value,
infrahub/task_manager/models.py  CHANGED

@@ -74,7 +74,7 @@ class FlowLogs(BaseModel):
                 "node": {
                     "message": log.message,
                     "severity": LOG_LEVEL_MAPPING.get(log.level, "error"),
-                    "timestamp": log.timestamp.
+                    "timestamp": log.timestamp.isoformat(),
                 }
             }
             for log in self.logs[flow_id]
infrahub/task_manager/task.py  CHANGED

@@ -329,9 +329,9 @@ class PrefectTask:
             "related_node": related_node.id if related_node else None,
             "related_node_kind": related_node.kind if related_node else None,
             "related_nodes": related_nodes_info.get_related_nodes_as_dict(flow_id=flow.id),
-            "created_at": flow.created.
-            "updated_at": flow.updated.
-            "start_time": flow.start_time.
+            "created_at": flow.created.isoformat() if flow.created else None,
+            "updated_at": flow.updated.isoformat() if flow.updated else None,
+            "start_time": flow.start_time.isoformat() if flow.start_time else None,
             "id": flow.id,
             "logs": {"edges": logs, "count": len(logs)},
         }
infrahub/validators/tasks.py  CHANGED

@@ -26,7 +26,7 @@ async def start_validator[ValidatorType: CoreValidator](
         validator.started_at.value = ""
         validator.completed_at.value = ""
         await validator.save()
-        validator = cast(ValidatorType, validator)
+        validator = cast("ValidatorType", validator)
     else:
         data["proposed_change"] = proposed_change
         validator = await client.create(kind=validator_type, data=data)
infrahub_sdk/ctl/AGENTS.md  ADDED

@@ -0,0 +1,67 @@
+# infrahub_sdk/ctl/AGENTS.md
+
+CLI tool (`infrahubctl`) built with Typer/AsyncTyper.
+
+## Command Pattern
+
+```python
+from rich.console import Console
+from ..async_typer import AsyncTyper
+from ..ctl.client import initialize_client
+from ..ctl.utils import catch_exception
+from .parameters import CONFIG_PARAM
+
+console = Console()
+app = AsyncTyper()
+
+@app.command(name="my-command")
+@catch_exception(console=console)
+async def my_command(
+    path: str = typer.Option(".", help="Path to file"),
+    branch: Optional[str] = None,
+    _: str = CONFIG_PARAM,  # Always include, even if unused
+):
+    client = initialize_client(branch=branch)
+    # implementation using Rich for output
+    console.print(Panel("Result", title="Success"))
+```
+
+## File Organization
+
+```text
+infrahub_sdk/ctl/
+├── cli_commands.py   # Entry point, registers subcommands
+├── client.py         # initialize_client(), initialize_client_sync()
+├── utils.py          # catch_exception decorator, parse_cli_vars
+├── parameters.py     # CONFIG_PARAM and shared parameters
+├── branch.py         # Branch subcommands
+├── schema.py         # Schema subcommands
+└── object.py         # Object subcommands
+```
+
+## Registering Commands
+
+```python
+# In cli_commands.py
+from ..ctl.branch import app as branch_app
+app.add_typer(branch_app, name="branch")
+
+# Or for top-level commands
+from .exporter import dump
+app.command(name="dump")(dump)
+```
+
+## Boundaries
+
+✅ **Always**
+
+- Use `@catch_exception(console=console)` decorator
+- Include `CONFIG_PARAM` in all commands
+- Use `initialize_client()` for client creation
+- Use Rich for output (tables, panels, console.print)
+
+🚫 **Never**
+
+- Use plain `print()` statements
+- Instantiate `InfrahubClient` directly (use `initialize_client`)
+- Forget error handling decorator