infrahub-server 1.5.4__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff compares the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
- infrahub/api/artifact.py +5 -3
- infrahub/auth.py +5 -6
- infrahub/cli/db.py +3 -3
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +2 -2
- infrahub/cli/dev.py +30 -0
- infrahub/config.py +62 -14
- infrahub/constants/database.py +5 -5
- infrahub/core/branch/models.py +24 -6
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/diff/model/diff.py +2 -2
- infrahub/core/graph/constraints.py +2 -2
- infrahub/core/manager.py +191 -60
- infrahub/core/merge.py +29 -2
- infrahub/core/migrations/shared.py +2 -2
- infrahub/core/models.py +5 -6
- infrahub/core/node/__init__.py +12 -6
- infrahub/core/node/create.py +36 -8
- infrahub/core/node/ipam.py +4 -4
- infrahub/core/node/node_property_attribute.py +2 -2
- infrahub/core/node/standard.py +1 -1
- infrahub/core/query/attribute.py +1 -1
- infrahub/core/query/branch.py +11 -0
- infrahub/core/query/node.py +9 -5
- infrahub/core/query/standard_node.py +3 -0
- infrahub/core/relationship/model.py +15 -10
- infrahub/core/schema/__init__.py +3 -3
- infrahub/core/schema/generic_schema.py +1 -1
- infrahub/core/schema/schema_branch.py +35 -16
- infrahub/core/task/user_task.py +2 -2
- infrahub/core/validators/determiner.py +3 -6
- infrahub/core/validators/enum.py +2 -2
- infrahub/database/__init__.py +1 -1
- infrahub/dependencies/interface.py +2 -2
- infrahub/events/constants.py +2 -2
- infrahub/git/base.py +42 -1
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +5 -1
- infrahub/git/tasks.py +28 -1
- infrahub/git/utils.py +9 -0
- infrahub/graphql/analyzer.py +4 -4
- infrahub/graphql/loaders/peers.py +6 -0
- infrahub/graphql/mutations/computed_attribute.py +1 -1
- infrahub/graphql/mutations/convert_object_type.py +1 -1
- infrahub/graphql/mutations/display_label.py +1 -1
- infrahub/graphql/mutations/hfid.py +1 -1
- infrahub/graphql/mutations/ipam.py +1 -1
- infrahub/graphql/mutations/profile.py +9 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +1 -1
- infrahub/graphql/queries/__init__.py +2 -1
- infrahub/graphql/queries/branch.py +58 -3
- infrahub/graphql/queries/ipam.py +9 -4
- infrahub/graphql/queries/resource_manager.py +7 -11
- infrahub/graphql/queries/search.py +5 -6
- infrahub/graphql/resolvers/ipam.py +20 -0
- infrahub/graphql/resolvers/many_relationship.py +12 -11
- infrahub/graphql/resolvers/resolver.py +6 -2
- infrahub/graphql/resolvers/single_relationship.py +1 -11
- infrahub/graphql/schema.py +2 -0
- infrahub/graphql/types/__init__.py +3 -1
- infrahub/graphql/types/branch.py +98 -2
- infrahub/lock.py +6 -6
- infrahub/log.py +1 -1
- infrahub/message_bus/messages/__init__.py +0 -12
- infrahub/patch/constants.py +2 -2
- infrahub/profiles/node_applier.py +9 -0
- infrahub/proposed_change/tasks.py +1 -1
- infrahub/task_manager/task.py +4 -4
- infrahub/telemetry/constants.py +2 -2
- infrahub/trigger/models.py +2 -2
- infrahub/trigger/setup.py +6 -9
- infrahub/utils.py +19 -1
- infrahub/validators/tasks.py +1 -1
- infrahub/workers/infrahub_async.py +39 -1
- infrahub_sdk/async_typer.py +2 -1
- infrahub_sdk/batch.py +2 -2
- infrahub_sdk/client.py +121 -10
- infrahub_sdk/config.py +2 -2
- infrahub_sdk/ctl/branch.py +176 -2
- infrahub_sdk/ctl/check.py +3 -3
- infrahub_sdk/ctl/cli.py +2 -2
- infrahub_sdk/ctl/cli_commands.py +10 -9
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/ctl/graphql.py +3 -4
- infrahub_sdk/ctl/importer.py +2 -3
- infrahub_sdk/ctl/repository.py +5 -6
- infrahub_sdk/ctl/task.py +2 -4
- infrahub_sdk/ctl/utils.py +4 -4
- infrahub_sdk/ctl/validate.py +1 -2
- infrahub_sdk/diff.py +80 -3
- infrahub_sdk/graphql/constants.py +14 -1
- infrahub_sdk/graphql/renderers.py +5 -1
- infrahub_sdk/node/attribute.py +10 -10
- infrahub_sdk/node/constants.py +2 -3
- infrahub_sdk/node/node.py +54 -11
- infrahub_sdk/node/related_node.py +1 -2
- infrahub_sdk/node/relationship.py +1 -2
- infrahub_sdk/object_store.py +4 -4
- infrahub_sdk/operation.py +2 -2
- infrahub_sdk/protocols_base.py +0 -1
- infrahub_sdk/protocols_generator/generator.py +1 -1
- infrahub_sdk/pytest_plugin/items/jinja2_transform.py +1 -1
- infrahub_sdk/pytest_plugin/models.py +1 -1
- infrahub_sdk/pytest_plugin/plugin.py +1 -1
- infrahub_sdk/query_groups.py +2 -2
- infrahub_sdk/schema/__init__.py +10 -14
- infrahub_sdk/schema/main.py +2 -2
- infrahub_sdk/schema/repository.py +2 -2
- infrahub_sdk/spec/object.py +2 -2
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/template/__init__.py +2 -1
- infrahub_sdk/transfer/importer/json.py +3 -3
- infrahub_sdk/types.py +2 -2
- infrahub_sdk/utils.py +2 -2
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/METADATA +58 -59
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/RECORD +239 -245
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/WHEEL +1 -1
- infrahub_server-1.6.0.dist-info/entry_points.txt +12 -0
- infrahub_testcontainers/container.py +2 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- infrahub/core/schema/generated/__init__.py +0 -0
- infrahub/core/schema/generated/attribute_schema.py +0 -133
- infrahub/core/schema/generated/base_node_schema.py +0 -111
- infrahub/core/schema/generated/genericnode_schema.py +0 -30
- infrahub/core/schema/generated/node_schema.py +0 -40
- infrahub/core/schema/generated/relationship_schema.py +0 -141
- infrahub_server-1.5.4.dist-info/entry_points.txt +0 -13
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info/licenses}/LICENSE.txt +0 -0
infrahub/graphql/loaders/peers.py CHANGED
```diff
@@ -22,6 +22,8 @@ class QueryPeerParams:
     fields: dict | None = None
     at: Timestamp | str | None = None
     branch_agnostic: bool = False
+    include_source: bool = False
+    include_owner: bool = False
 
     def __hash__(self) -> int:
         frozen_fields: frozenset | None = None
@@ -39,6 +41,8 @@ class QueryPeerParams:
                 self.schema.name,
                 str(self.source_kind),
                 str(self.branch_agnostic),
+                str(self.include_source),
+                str(self.include_owner),
             ]
         )
         return hash(hash_str)
@@ -63,6 +67,8 @@ class PeerRelationshipsDataLoader(DataLoader[str, list[Relationship]]):
             branch=self.query_params.branch,
             branch_agnostic=self.query_params.branch_agnostic,
             fetch_peers=True,
+            include_source=self.query_params.include_source,
+            include_owner=self.query_params.include_owner,
         )
         peer_rels_by_node_id: dict[str, list[Relationship]] = {}
         for rel in peer_rels:
```
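The new `include_source`/`include_owner` flags are folded into `QueryPeerParams.__hash__` because the relationship resolver caches one `PeerRelationshipsDataLoader` per distinct parameter set (see the `self._data_loader_instances` lookup in `many_relationship.py` below); two requests that differ only in these flags must not share a loader. A minimal sketch of that caching pattern, with simplified stand-ins for the real classes (not the actual Infrahub implementations):

```python
from dataclasses import dataclass


@dataclass
class QueryPeerParams:
    """Simplified stand-in: only the fields relevant to hashing are shown."""

    schema_name: str
    branch_agnostic: bool = False
    include_source: bool = False
    include_owner: bool = False

    def __hash__(self) -> int:
        # Every field that changes the shape of the underlying query must take part
        # in the hash, otherwise two different queries could reuse the same loader.
        return hash((self.schema_name, self.branch_agnostic, self.include_source, self.include_owner))


_loader_cache: dict[QueryPeerParams, object] = {}


def get_loader(params: QueryPeerParams) -> object:
    # Reuse an existing loader only when *all* query-shaping parameters match.
    if params not in _loader_cache:
        _loader_cache[params] = object()  # placeholder for PeerRelationshipsDataLoader(params)
    return _loader_cache[params]
```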
infrahub/graphql/mutations/computed_attribute.py CHANGED
```diff
@@ -110,7 +110,7 @@ class UpdateComputedAttribute(Mutation):
         event = NodeUpdatedEvent(
             kind=node_schema.kind,
             node_id=target_node.get_id(),
-            changelog=target_node.node_changelog
+            changelog=target_node.node_changelog,
             fields=[str(data.attribute)],
             meta=EventMeta(
                 context=graphql_context.get_context(),
```
infrahub/graphql/mutations/convert_object_type.py CHANGED
```diff
@@ -76,7 +76,7 @@ class ConvertObjectType(Mutation):
 
         if target_schema.kind in [REPOSITORY, READONLYREPOSITORY]:
             new_node = await convert_repository_type(
-                repository=node_to_convert,
+                repository=node_to_convert,  # type: ignore[arg-type]
                 target_schema=target_schema,
                 mapping=fields_mapping,
                 branch=graphql_context.branch,
```
infrahub/graphql/mutations/display_label.py CHANGED
```diff
@@ -101,7 +101,7 @@ class UpdateDisplayLabel(Mutation):
         event = NodeUpdatedEvent(
             kind=node_schema.kind,
             node_id=target_node.get_id(),
-            changelog=target_node.node_changelog
+            changelog=target_node.node_changelog,
             fields=["display_label"],
             meta=EventMeta(
                 context=graphql_context.get_context(),
```
infrahub/graphql/mutations/hfid.py CHANGED
```diff
@@ -108,7 +108,7 @@ class UpdateHFID(Mutation):
         event = NodeUpdatedEvent(
             kind=node_schema.kind,
             node_id=target_node.get_id(),
-            changelog=target_node.node_changelog
+            changelog=target_node.node_changelog,
             fields=["human_friendly_id"],
             meta=EventMeta(
                 context=graphql_context.get_context(),
```
infrahub/graphql/mutations/ipam.py CHANGED
```diff
@@ -48,7 +48,7 @@ async def validate_namespace(
         namespace_id = namespace.id
     elif "hfid" in data["ip_namespace"]:
         namespace = await registry.manager.get_one_by_hfid(
-            db=db, branch=branch, kind=InfrahubKind.IPNAMESPACE, hfid=data["ip_namespace"]["hfid"]
+            db=db, branch=branch, kind=InfrahubKind.IPNAMESPACE, hfid=data["ip_namespace"]["hfid"], raise_on_error=True
         )
         namespace_id = namespace.id
     else:
```
infrahub/graphql/mutations/profile.py CHANGED
```diff
@@ -57,6 +57,8 @@ class InfrahubProfileMutation(InfrahubMutationMixin, Mutation):
     ) -> None:
         if not node_ids:
             related_nodes = await obj.related_nodes.get_relationships(db=db)  # type: ignore[attr-defined]
+            if hasattr(obj, "related_templates"):
+                related_nodes.extend(await obj.related_templates.get_relationships(db=db))  # type: ignore[attr-defined]
             node_ids = [rel.peer_id for rel in related_nodes]
         if node_ids:
             await workflow_service.submit_workflow(
@@ -79,7 +81,12 @@ class InfrahubProfileMutation(InfrahubMutationMixin, Mutation):
 
     @classmethod
     async def _get_profile_related_node_ids(cls, db: InfrahubDatabase, obj: Node) -> set[str]:
-        related_nodes =
+        related_nodes = []
+        related_nodes.extend(await obj.related_nodes.get_relationships(db=db))  # type: ignore[attr-defined]
+
+        if hasattr(obj, "related_templates"):
+            related_nodes.extend(await obj.related_templates.get_relationships(db=db))  # type: ignore[attr-defined]
+
         if related_nodes:
             related_node_ids = {rel.peer_id for rel in related_nodes}
         else:
@@ -186,6 +193,7 @@ class InfrahubProfilesRefresh(Mutation):
             branch=branch,
             id=str(data.id),
             include_source=True,
+            raise_on_error=True,
         )
         node_profiles_applier = NodeProfilesApplier(db=db, branch=branch)
         updated_fields = await node_profiles_applier.apply_profiles(node=obj)
```
infrahub/graphql/mutations/relationship.py CHANGED
```diff
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from enum import
+from enum import StrEnum
 from typing import TYPE_CHECKING, Self
 
 from graphene import Boolean, InputField, InputObjectType, List, Mutation, String
@@ -48,7 +48,7 @@ if TYPE_CHECKING:
 RELATIONSHIP_PEERS_TO_IGNORE = [InfrahubKind.NODE]
 
 
-class GroupUpdateType(
+class GroupUpdateType(StrEnum):
     NONE = "none"
     MEMBERS = "members"
     MEMBER_OF_GROUPS = "member_of_groups"
```
infrahub/graphql/mutations/resource_manager.py CHANGED
```diff
@@ -122,7 +122,7 @@ class IPAddressPoolGetResource(Mutation):
     ) -> Self:
         graphql_context: GraphqlContext = info.context
 
-        obj: CoreIPAddressPool = await registry.manager.find_object(
+        obj: CoreIPAddressPool = await registry.manager.find_object(  # type: ignore[assignment]
             db=graphql_context.db,
             kind=InfrahubKind.IPADDRESSPOOL,
             id=data.get("id"),
```
infrahub/graphql/queries/__init__.py CHANGED
```diff
@@ -1,5 +1,5 @@
 from .account import AccountPermissions, AccountToken
-from .branch import BranchQueryList
+from .branch import BranchQueryList, InfrahubBranchQueryList
 from .internal import InfrahubInfo
 from .ipam import (
     DeprecatedIPAddressGetNextAvailable,
@@ -20,6 +20,7 @@ __all__ = [
     "BranchQueryList",
     "DeprecatedIPAddressGetNextAvailable",
     "DeprecatedIPPrefixGetNextAvailable",
+    "InfrahubBranchQueryList",
    "InfrahubIPAddressGetNextAvailable",
    "InfrahubIPPrefixGetNextAvailable",
    "InfrahubInfo",
```
infrahub/graphql/queries/branch.py CHANGED
```diff
@@ -2,10 +2,12 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from graphene import ID, Field, List, NonNull, String
+from graphene import ID, Field, Int, List, NonNull, String
 
+from infrahub.core.registry import registry
+from infrahub.exceptions import ValidationError
 from infrahub.graphql.field_extractor import extract_graphql_fields
-from infrahub.graphql.types import BranchType
+from infrahub.graphql.types import BranchType, InfrahubBranch, InfrahubBranchType
 
 if TYPE_CHECKING:
     from graphql import GraphQLResolveInfo
@@ -17,7 +19,7 @@ async def branch_resolver(
     **kwargs: Any,
 ) -> list[dict[str, Any]]:
     fields = extract_graphql_fields(info)
-    return await BranchType.get_list(graphql_context=info.context, fields=fields, **kwargs)
+    return await BranchType.get_list(graphql_context=info.context, fields=fields, exclude_global=True, **kwargs)
 
 
 BranchQueryList = Field(
@@ -28,3 +30,56 @@ BranchQueryList = Field(
     resolver=branch_resolver,
     required=True,
 )
+
+
+async def infrahub_branch_resolver(
+    root: dict,  # noqa: ARG001
+    info: GraphQLResolveInfo,
+    limit: int | None = None,
+    offset: int | None = None,
+    name__value: str | None = None,
+    ids: list[str] | None = None,
+) -> dict[str, Any]:
+    if isinstance(limit, int) and limit < 1:
+        raise ValidationError("limit must be >= 1")
+    if isinstance(offset, int) and offset < 0:
+        raise ValidationError("offset must be >= 0")
+
+    fields = extract_graphql_fields(info)
+    result: dict[str, Any] = {}
+    if "edges" in fields:
+        branches = await InfrahubBranch.get_list(
+            graphql_context=info.context,
+            fields=fields.get("edges", {}).get("node", {}),
+            limit=limit,
+            offset=offset,
+            name=name__value,
+            ids=ids,
+            exclude_global=True,
+        )
+        result["edges"] = [{"node": branch} for branch in branches]
+    if "count" in fields:
+        result["count"] = await InfrahubBranchType.get_list_count(
+            graphql_context=info.context, name=name__value, ids=ids
+        )
+
+    if "default_branch" in fields:
+        result["default_branch"] = await InfrahubBranch.get_by_name(
+            graphql_context=info.context,
+            fields=fields["default_branch"],
+            name=registry.default_branch,
+        )
+
+    return result
+
+
+InfrahubBranchQueryList = Field(
+    InfrahubBranchType,
+    offset=Int(),
+    limit=Int(),
+    name__value=String(),
+    ids=List(ID),
+    description="Retrieve paginated information about active branches.",
+    resolver=infrahub_branch_resolver,
+    required=True,
+)
```
infrahub/graphql/queries/ipam.py CHANGED
```diff
@@ -8,6 +8,7 @@ from netaddr import IPSet
 
 from infrahub.core.constants import InfrahubKind
 from infrahub.core.manager import NodeManager
+from infrahub.core.protocols import BuiltinIPPrefix
 from infrahub.core.query.ipam import get_ip_addresses, get_subnets
 from infrahub.exceptions import NodeNotFoundError, ValidationError
 from infrahub.pools.address import get_available
@@ -31,7 +32,9 @@ class IPAddressGetNextAvailable(ObjectType):
     ) -> dict[str, str]:
         graphql_context: GraphqlContext = info.context
 
-        prefix = await NodeManager.get_one(
+        prefix = await NodeManager.get_one(
+            id=prefix_id, kind=BuiltinIPPrefix, db=graphql_context.db, branch=graphql_context.branch
+        )
 
         if not prefix:
             raise NodeNotFoundError(
@@ -78,17 +81,19 @@ class IPPrefixGetNextAvailable(ObjectType):
     ) -> dict[str, str]:
         graphql_context: GraphqlContext = info.context
 
-        prefix = await NodeManager.get_one(
+        prefix = await NodeManager.get_one(
+            id=prefix_id, db=graphql_context.db, branch=graphql_context.branch, kind=BuiltinIPPrefix
+        )
 
         if not prefix:
             raise NodeNotFoundError(
                 branch_name=graphql_context.branch.name, node_type=InfrahubKind.IPPREFIX, identifier=prefix_id
             )
 
-        namespace = await prefix.ip_namespace.get_peer(db=graphql_context.db)
+        namespace = await prefix.ip_namespace.get_peer(db=graphql_context.db)
         subnets = await get_subnets(
             db=graphql_context.db,
-            ip_prefix=ipaddress.ip_network(prefix.prefix.value),
+            ip_prefix=ipaddress.ip_network(prefix.prefix.value),
             namespace=namespace,
             branch=graphql_context.branch,
         )
```
infrahub/graphql/queries/resource_manager.py CHANGED
```diff
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import contextlib
 from typing import TYPE_CHECKING, Any
 
 from graphene import BigInt, Field, Float, Int, List, NonNull, ObjectType, String
@@ -23,7 +24,6 @@ if TYPE_CHECKING:
 
     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
-    from infrahub.core.protocols import CoreNode
     from infrahub.core.timestamp import Timestamp
     from infrahub.database import InfrahubDatabase
     from infrahub.graphql.initialization import GraphqlContext
@@ -59,7 +59,7 @@ class PoolAllocatedEdge(ObjectType):
     node = Field(PoolAllocatedNode, required=True)
 
 
-def _validate_pool_type(pool_id: str, pool:
+def _validate_pool_type(pool_id: str, pool: Node | None = None) -> Node:
     if not pool or pool.get_kind() not in [
         InfrahubKind.IPADDRESSPOOL,
         InfrahubKind.IPPREFIXPOOL,
@@ -83,9 +83,7 @@ class PoolAllocated(ObjectType):
         limit: int = 10,
     ) -> dict:
         graphql_context: GraphqlContext = info.context
-        pool = await NodeManager.get_one(
-            id=pool_id, db=graphql_context.db, branch=graphql_context.branch
-        )
+        pool = await NodeManager.get_one(id=pool_id, db=graphql_context.db, branch=graphql_context.branch)
 
         fields = extract_graphql_fields(info=info)
 
@@ -190,7 +188,7 @@ class PoolUtilization(ObjectType):
     ) -> dict:
         graphql_context: GraphqlContext = info.context
         db: InfrahubDatabase = graphql_context.db
-        pool
+        pool = await NodeManager.get_one(id=pool_id, db=db, branch=graphql_context.branch)
         pool = _validate_pool_type(pool_id=pool_id, pool=pool)
         if pool.get_kind() == "CoreNumberPool":
             return await resolve_number_pool_utilization(
@@ -199,10 +197,8 @@ class PoolUtilization(ObjectType):
 
         resources_map: dict[str, Node] = {}
 
-        try:
+        with contextlib.suppress(SchemaNotFoundError):
             resources_map = await pool.resources.get_peers(db=db, branch_agnostic=True)  # type: ignore[attr-defined,union-attr]
-        except SchemaNotFoundError:
-            pass
 
         utilization_getter = PrefixUtilizationGetter(
             db=db, ip_prefixes=list(resources_map.values()), at=graphql_context.at
@@ -275,7 +271,7 @@ class PoolUtilization(ObjectType):
 
 
 async def resolve_number_pool_allocation(
-    db: InfrahubDatabase, graphql_context: GraphqlContext, pool:
+    db: InfrahubDatabase, graphql_context: GraphqlContext, pool: Node, fields: dict, offset: int, limit: int
 ) -> dict:
     response: dict[str, Any] = {}
     query = await NumberPoolGetAllocated.init(
@@ -304,7 +300,7 @@ async def resolve_number_pool_allocation(
 
 
 async def resolve_number_pool_utilization(
-    db: InfrahubDatabase, pool:
+    db: InfrahubDatabase, pool: Node, at: Timestamp | str | None, branch: Branch
 ) -> dict:
     """
     Returns a mapping containg utilization info of a number pool.
```
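Both this file and queries/search.py below replace a try/except-pass block with `contextlib.suppress`. The two forms are equivalent for the listed exception types; anything else still propagates. A minimal illustration in plain Python (the `lookup` dict is just an example, not code from the diff):

```python
import contextlib

lookup: dict[str, int] = {}

# Old style: swallow only KeyError, let other exceptions propagate.
try:
    value = lookup["missing"]
except KeyError:
    pass

# Equivalent new style: suppress() catches only the listed exception types.
with contextlib.suppress(KeyError):
    value = lookup["missing"]
```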
infrahub/graphql/queries/search.py CHANGED
```diff
@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import contextlib
 import ipaddress
 from typing import TYPE_CHECKING, Any
 
@@ -13,7 +14,7 @@ from infrahub.graphql.field_extractor import extract_graphql_fields
 if TYPE_CHECKING:
     from graphql import GraphQLResolveInfo
 
-    from infrahub.core.
+    from infrahub.core.node import Node as InfrahubNode
     from infrahub.graphql.initialization import GraphqlContext
 
 
@@ -106,22 +107,20 @@ async def search_resolver(
 ) -> dict[str, Any]:
     graphql_context: GraphqlContext = info.context
     response: dict[str, Any] = {}
-    results: list[
+    results: list[InfrahubNode] = []
 
     fields = extract_graphql_fields(info=info)
 
     if is_valid_uuid(q):
-        matching
+        matching = await NodeManager.get_one(
             db=graphql_context.db, branch=graphql_context.branch, at=graphql_context.at, id=q
         )
         if matching:
             results.append(matching)
     else:
-        try:
+        with contextlib.suppress(ValueError, ipaddress.AddressValueError):
             # Convert any IPv6 address, network or partial address to collapsed format as it might be stored in db.
             q = _collapse_ipv6(q)
-        except (ValueError, ipaddress.AddressValueError):
-            pass
 
     for kind in [InfrahubKind.NODE, InfrahubKind.GENERICGROUP]:
         objs = await NodeManager.query(
```
infrahub/graphql/resolvers/ipam.py CHANGED
```diff
@@ -4,6 +4,7 @@ import ipaddress
 from typing import TYPE_CHECKING, Any
 
 from graphql.type.definition import GraphQLNonNull
+from infrahub_sdk.utils import deep_merge_dict
 from netaddr import IPSet
 from opentelemetry import trace
 
@@ -233,6 +234,23 @@ async def _resolve_available_prefix_nodes(
     return available_nodes
 
 
+def _ensure_display_label_fields(
+    db: InfrahubDatabase, branch: Branch, schema: NodeSchema | GenericSchema, node_fields: dict[str, Any]
+) -> None:
+    """Ensure fields needed to compute display_label are included in node_fields.
+
+    This is mostly for virtual nodes (InternalIPPrefixAvailable, InternalIPRangeAvailable) that are not stored in the
+    database.
+    """
+    if "display_label" not in node_fields or schema.kind not in [InfrahubKind.IPPREFIX, InfrahubKind.IPADDRESS]:
+        return
+
+    schema_branch = db.schema.get_schema_branch(name=branch.name)
+    display_label_fields = schema_branch.generate_fields_for_display_label(name=schema.kind)
+    if display_label_fields:
+        deep_merge_dict(dicta=node_fields, dictb=display_label_fields)
+
+
 def _filter_kinds(nodes: list[Node], kinds: list[str], limit: int | None) -> list[Node]:
     filtered: list[Node] = []
     available_node_kinds = [InfrahubKind.IPPREFIXAVAILABLE, InfrahubKind.IPRANGEAVAILABLE]
@@ -324,6 +342,8 @@ async def ipam_paginated_list_resolver(  # noqa: PLR0915
     edges = fields.get("edges", {})
     node_fields = edges.get("node", {})
 
+    _ensure_display_label_fields(db=db, branch=graphql_context.branch, schema=schema, node_fields=node_fields)
+
     permission_set: dict[str, Any] | None = None
     permissions = (
         await get_permissions(schema=schema, graphql_context=graphql_context)
```
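The new `_ensure_display_label_fields` helper merges the schema-derived fields required to compute `display_label` into the client's requested field selection, so that virtual IPAM nodes (which never hit the database) can still render a label. The sketch below illustrates that merge with a local stand-in function; it is not the SDK's `deep_merge_dict`, and the shape of the generated display-label fields is an assumption:

```python
from typing import Any


def merge_fields(target: dict[str, Any], extra: dict[str, Any]) -> None:
    """Illustrative deep merge of GraphQL field-selection dicts (stand-in, not deep_merge_dict)."""
    for key, value in extra.items():
        if isinstance(value, dict) and isinstance(target.get(key), dict):
            merge_fields(target[key], value)
        else:
            target.setdefault(key, value)


# Example: the client asked only for display_label on an available prefix, but
# computing it also requires the underlying prefix value (assumed field shape).
node_fields: dict[str, Any] = {"display_label": None}
display_label_fields: dict[str, Any] = {"prefix": {"value": None}}

merge_fields(node_fields, display_label_fields)
print(node_fields)  # {'display_label': None, 'prefix': {'value': None}}
```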
infrahub/graphql/resolvers/many_relationship.py CHANGED
```diff
@@ -1,7 +1,6 @@
 from typing import TYPE_CHECKING, Any
 
 from graphql import GraphQLResolveInfo
-from infrahub_sdk.utils import deep_merge_dict
 
 from infrahub.core.branch.models import Branch
 from infrahub.core.constants import BranchSupportType, RelationshipHierarchyDirection
@@ -12,6 +11,7 @@ from infrahub.core.schema.relationship_schema import RelationshipSchema
 from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase
 from infrahub.graphql.field_extractor import extract_graphql_fields
+from infrahub.utils import has_any_key
 
 from ..loaders.peers import PeerRelationshipsDataLoader, QueryPeerParams
 from ..types import RELATIONS_PROPERTY_MAP, RELATIONS_PROPERTY_MAP_REVERSED
@@ -195,6 +195,9 @@ class ManyRelationshipResolver:
         offset: int | None = None,
         limit: int | None = None,
     ) -> list[dict[str, Any]] | None:
+        include_source = has_any_key(data=node_fields, keys=["_relation__source", "source"])
+        include_owner = has_any_key(data=node_fields, keys=["_relation__owner", "owner"])
+
         async with db.start_session(read_only=True) as dbs:
             objs = await NodeManager.query_peers(
                 db=dbs,
@@ -209,6 +212,8 @@ class ManyRelationshipResolver:
                 branch=branch,
                 branch_agnostic=rel_schema.branch is BranchSupportType.AGNOSTIC,
                 fetch_peers=True,
+                include_source=include_source,
+                include_owner=include_owner,
             )
         if not objs:
             return None
@@ -226,17 +231,11 @@ class ManyRelationshipResolver:
         filters: dict[str, Any],
         node_fields: dict[str, Any],
     ) -> list[dict[str, Any]] | None:
-        if node_fields and "display_label" in node_fields:
-            schema_branch = db.schema.get_schema_branch(name=branch.name)
-            display_label_fields = schema_branch.generate_fields_for_display_label(name=rel_schema.peer)
-            if display_label_fields:
-                node_fields = deep_merge_dict(dicta=node_fields, dictb=display_label_fields)
-
         if node_fields and "hfid" in node_fields:
-
-
-
-
+            node_fields["human_friendly_id"] = None
+
+        include_source = has_any_key(data=node_fields, keys=["_relation__source", "source"])
+        include_owner = has_any_key(data=node_fields, keys=["_relation__owner", "owner"])
 
         query_params = QueryPeerParams(
             branch=branch,
@@ -246,6 +245,8 @@ class ManyRelationshipResolver:
             fields=node_fields,
             at=at,
             branch_agnostic=rel_schema.branch is BranchSupportType.AGNOSTIC,
+            include_source=include_source,
+            include_owner=include_owner,
         )
         if query_params in self._data_loader_instances:
             loader = self._data_loader_instances[query_params]
```
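The relationship resolvers now decide whether to fetch source/owner metadata from the requested field selection via a new `has_any_key` helper (infrahub/utils.py gains 19 lines in this release, but its body is not shown in this diff). From the call sites, it presumably answers "does this, possibly nested, field-selection dict contain any of these keys?". A sketch of that assumed behaviour:

```python
from typing import Any


def has_any_key(data: dict[str, Any], keys: list[str]) -> bool:
    """Assumed behaviour of infrahub.utils.has_any_key (implementation not shown in this diff):
    return True if any of the given keys appears anywhere in a nested field-selection dict."""
    for key, value in data.items():
        if key in keys:
            return True
        if isinstance(value, dict) and has_any_key(data=value, keys=keys):
            return True
    return False


# As used by the resolvers above: only fetch source/owner metadata when the client asked for it.
node_fields = {"id": None, "display_label": None, "_relation__source": {"id": None}}
include_source = has_any_key(data=node_fields, keys=["_relation__source", "source"])
include_owner = has_any_key(data=node_fields, keys=["_relation__owner", "owner"])
print(include_source, include_owner)  # True False
```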
infrahub/graphql/resolvers/resolver.py CHANGED
```diff
@@ -9,6 +9,7 @@ from infrahub.core.constants import BranchSupportType, InfrahubKind, Relationshi
 from infrahub.core.manager import NodeManager
 from infrahub.exceptions import NodeNotFoundError
 from infrahub.graphql.field_extractor import extract_graphql_fields
+from infrahub.utils import has_any_key
 
 from ..models import OrderModel
 from ..parser import extract_selection
@@ -185,6 +186,9 @@ async def default_paginated_list_resolver(
 
     objs = []
     if edges or "hfid" in filters:
+        include_source = has_any_key(data=node_fields, keys=["_relation__source", "source"])
+        include_owner = has_any_key(data=node_fields, keys=["_relation__owner", "owner"])
+
         objs = await NodeManager.query(
             db=db,
             schema=schema,
@@ -195,8 +199,8 @@
             limit=limit,
             offset=offset,
             account=graphql_context.account_session,
-            include_source=
-            include_owner=
+            include_source=include_source,
+            include_owner=include_owner,
             partial_match=partial_match,
             order=order,
         )
```
infrahub/graphql/resolvers/single_relationship.py CHANGED
```diff
@@ -2,7 +2,6 @@ from typing import TYPE_CHECKING, Any
 
 from graphql import GraphQLResolveInfo
 from graphql.type.definition import GraphQLNonNull
-from infrahub_sdk.utils import deep_merge_dict
 
 from infrahub.core.branch.models import Branch
 from infrahub.core.constants import BranchSupportType
@@ -142,17 +141,8 @@ class SingleRelationshipResolver:
         except (KeyError, IndexError):
             return None
 
-        if node_fields and "display_label" in node_fields:
-            schema_branch = db.schema.get_schema_branch(name=branch.name)
-            display_label_fields = schema_branch.generate_fields_for_display_label(name=rel_schema.peer)
-            if display_label_fields:
-                node_fields = deep_merge_dict(dicta=node_fields, dictb=display_label_fields)
-
         if node_fields and "hfid" in node_fields:
-
-            hfid_fields = peer_schema.generate_fields_for_hfid()
-            if hfid_fields:
-                node_fields = deep_merge_dict(dicta=node_fields, dictb=hfid_fields)
+            node_fields["human_friendly_id"] = None
 
         query_params = GetManyParams(
             fields=node_fields,
```
infrahub/graphql/schema.py CHANGED
```diff
@@ -39,6 +39,7 @@ from .queries import (
     BranchQueryList,
     DeprecatedIPAddressGetNextAvailable,
     DeprecatedIPPrefixGetNextAvailable,
+    InfrahubBranchQueryList,
     InfrahubInfo,
     InfrahubIPAddressGetNextAvailable,
     InfrahubIPPrefixGetNextAvailable,
@@ -65,6 +66,7 @@ class InfrahubBaseQuery(ObjectType):
 
     Relationship = Relationship
 
+    InfrahubBranch = InfrahubBranchQueryList
     InfrahubInfo = InfrahubInfo
     InfrahubStatus = InfrahubStatus
 
```
infrahub/graphql/types/__init__.py CHANGED
```diff
@@ -21,7 +21,7 @@ from .attribute import (
     StrAttributeType,
     TextAttributeType,
 )
-from .branch import BranchType
+from .branch import BranchType, InfrahubBranch, InfrahubBranchType
 from .interface import InfrahubInterface
 from .node import InfrahubObject
 from .permission import PaginatedObjectPermission
@@ -41,6 +41,8 @@ __all__ = [
     "DropdownType",
     "IPHostType",
     "IPNetworkType",
+    "InfrahubBranch",
+    "InfrahubBranchType",
     "InfrahubInterface",
     "InfrahubObject",
     "InfrahubObjectType",
```