infrahub-server 1.5.4__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/artifact.py +5 -3
- infrahub/auth.py +5 -6
- infrahub/cli/db.py +3 -3
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +2 -2
- infrahub/cli/dev.py +30 -0
- infrahub/config.py +62 -14
- infrahub/constants/database.py +5 -5
- infrahub/core/branch/models.py +24 -6
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/diff/model/diff.py +2 -2
- infrahub/core/graph/constraints.py +2 -2
- infrahub/core/manager.py +191 -60
- infrahub/core/merge.py +29 -2
- infrahub/core/migrations/shared.py +2 -2
- infrahub/core/models.py +5 -6
- infrahub/core/node/__init__.py +12 -6
- infrahub/core/node/create.py +36 -8
- infrahub/core/node/ipam.py +4 -4
- infrahub/core/node/node_property_attribute.py +2 -2
- infrahub/core/node/standard.py +1 -1
- infrahub/core/query/attribute.py +1 -1
- infrahub/core/query/branch.py +11 -0
- infrahub/core/query/node.py +9 -5
- infrahub/core/query/standard_node.py +3 -0
- infrahub/core/relationship/model.py +15 -10
- infrahub/core/schema/__init__.py +3 -3
- infrahub/core/schema/generic_schema.py +1 -1
- infrahub/core/schema/schema_branch.py +35 -16
- infrahub/core/task/user_task.py +2 -2
- infrahub/core/validators/determiner.py +3 -6
- infrahub/core/validators/enum.py +2 -2
- infrahub/database/__init__.py +1 -1
- infrahub/dependencies/interface.py +2 -2
- infrahub/events/constants.py +2 -2
- infrahub/git/base.py +42 -1
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +5 -1
- infrahub/git/tasks.py +28 -1
- infrahub/git/utils.py +9 -0
- infrahub/graphql/analyzer.py +4 -4
- infrahub/graphql/loaders/peers.py +6 -0
- infrahub/graphql/mutations/computed_attribute.py +1 -1
- infrahub/graphql/mutations/convert_object_type.py +1 -1
- infrahub/graphql/mutations/display_label.py +1 -1
- infrahub/graphql/mutations/hfid.py +1 -1
- infrahub/graphql/mutations/ipam.py +1 -1
- infrahub/graphql/mutations/profile.py +9 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +1 -1
- infrahub/graphql/queries/__init__.py +2 -1
- infrahub/graphql/queries/branch.py +58 -3
- infrahub/graphql/queries/ipam.py +9 -4
- infrahub/graphql/queries/resource_manager.py +7 -11
- infrahub/graphql/queries/search.py +5 -6
- infrahub/graphql/resolvers/ipam.py +20 -0
- infrahub/graphql/resolvers/many_relationship.py +12 -11
- infrahub/graphql/resolvers/resolver.py +6 -2
- infrahub/graphql/resolvers/single_relationship.py +1 -11
- infrahub/graphql/schema.py +2 -0
- infrahub/graphql/types/__init__.py +3 -1
- infrahub/graphql/types/branch.py +98 -2
- infrahub/lock.py +6 -6
- infrahub/log.py +1 -1
- infrahub/message_bus/messages/__init__.py +0 -12
- infrahub/patch/constants.py +2 -2
- infrahub/profiles/node_applier.py +9 -0
- infrahub/proposed_change/tasks.py +1 -1
- infrahub/task_manager/task.py +4 -4
- infrahub/telemetry/constants.py +2 -2
- infrahub/trigger/models.py +2 -2
- infrahub/trigger/setup.py +6 -9
- infrahub/utils.py +19 -1
- infrahub/validators/tasks.py +1 -1
- infrahub/workers/infrahub_async.py +39 -1
- infrahub_sdk/async_typer.py +2 -1
- infrahub_sdk/batch.py +2 -2
- infrahub_sdk/client.py +121 -10
- infrahub_sdk/config.py +2 -2
- infrahub_sdk/ctl/branch.py +176 -2
- infrahub_sdk/ctl/check.py +3 -3
- infrahub_sdk/ctl/cli.py +2 -2
- infrahub_sdk/ctl/cli_commands.py +10 -9
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/ctl/graphql.py +3 -4
- infrahub_sdk/ctl/importer.py +2 -3
- infrahub_sdk/ctl/repository.py +5 -6
- infrahub_sdk/ctl/task.py +2 -4
- infrahub_sdk/ctl/utils.py +4 -4
- infrahub_sdk/ctl/validate.py +1 -2
- infrahub_sdk/diff.py +80 -3
- infrahub_sdk/graphql/constants.py +14 -1
- infrahub_sdk/graphql/renderers.py +5 -1
- infrahub_sdk/node/attribute.py +10 -10
- infrahub_sdk/node/constants.py +2 -3
- infrahub_sdk/node/node.py +54 -11
- infrahub_sdk/node/related_node.py +1 -2
- infrahub_sdk/node/relationship.py +1 -2
- infrahub_sdk/object_store.py +4 -4
- infrahub_sdk/operation.py +2 -2
- infrahub_sdk/protocols_base.py +0 -1
- infrahub_sdk/protocols_generator/generator.py +1 -1
- infrahub_sdk/pytest_plugin/items/jinja2_transform.py +1 -1
- infrahub_sdk/pytest_plugin/models.py +1 -1
- infrahub_sdk/pytest_plugin/plugin.py +1 -1
- infrahub_sdk/query_groups.py +2 -2
- infrahub_sdk/schema/__init__.py +10 -14
- infrahub_sdk/schema/main.py +2 -2
- infrahub_sdk/schema/repository.py +2 -2
- infrahub_sdk/spec/object.py +2 -2
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/template/__init__.py +2 -1
- infrahub_sdk/transfer/importer/json.py +3 -3
- infrahub_sdk/types.py +2 -2
- infrahub_sdk/utils.py +2 -2
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/METADATA +58 -59
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/RECORD +239 -245
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info}/WHEEL +1 -1
- infrahub_server-1.6.0.dist-info/entry_points.txt +12 -0
- infrahub_testcontainers/container.py +2 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- infrahub/core/schema/generated/__init__.py +0 -0
- infrahub/core/schema/generated/attribute_schema.py +0 -133
- infrahub/core/schema/generated/base_node_schema.py +0 -111
- infrahub/core/schema/generated/genericnode_schema.py +0 -30
- infrahub/core/schema/generated/node_schema.py +0 -40
- infrahub/core/schema/generated/relationship_schema.py +0 -141
- infrahub_server-1.5.4.dist-info/entry_points.txt +0 -13
- {infrahub_server-1.5.4.dist-info → infrahub_server-1.6.0.dist-info/licenses}/LICENSE.txt +0 -0
infrahub_sdk/ctl/task.py
CHANGED
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import Optional
-
 import typer
 from rich.console import Console
 from rich.table import Table
@@ -75,8 +73,8 @@ async def list_tasks(
     state: list[str] = typer.Option(
         None, "--state", "-s", help="Filter by task state. Can be provided multiple times."
     ),
-    limit:
-    offset:
+    limit: int | None = typer.Option(None, help="Maximum number of tasks to retrieve."),
+    offset: int | None = typer.Option(None, help="Offset for pagination."),
     include_related_nodes: bool = typer.Option(False, help="Include related nodes in the output."),
     include_logs: bool = typer.Option(False, help="Include task logs in the output."),
     json_output: bool = typer.Option(False, "--json", help="Output the result as JSON."),
infrahub_sdk/ctl/utils.py
CHANGED
@@ -3,10 +3,10 @@ from __future__ import annotations
 import asyncio
 import logging
 import traceback
-from collections.abc import Coroutine
+from collections.abc import Callable, Coroutine
 from functools import wraps
 from pathlib import Path
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, NoReturn, TypeVar
 
 import typer
 from click.exceptions import Exit
@@ -46,7 +46,7 @@ def init_logging(debug: bool = False) -> None:
 
     log_level = "DEBUG" if debug else "INFO"
     FORMAT = "%(message)s"
-    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
+    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler(show_path=debug)])
    logging.getLogger("infrahubctl")
 
 
@@ -149,7 +149,7 @@ def print_graphql_errors(console: Console, errors: list) -> None:
         console.print(f"[red]{escape(str(error))}")
 
 
-def parse_cli_vars(variables:
+def parse_cli_vars(variables: list[str] | None) -> dict[str, str]:
     if not variables:
         return {}
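
The parse_cli_vars signature above now takes list[str] | None and returns dict[str, str]. A minimal usage sketch, assuming the function splits each argument on "=" as suggested by the "Format key=value key=value" help text in validate.py below; the exact parsing behaviour is not shown in this diff:

    from infrahub_sdk.ctl.utils import parse_cli_vars

    # CLI-style arguments; the expected result is an assumption based on the help text.
    print(parse_cli_vars(["branch=main", "limit=10"]))  # {'branch': 'main', 'limit': '10'}

    # No variables on the command line returns an empty dict, per the early return above.
    print(parse_cli_vars(None))  # {}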
infrahub_sdk/ctl/validate.py
CHANGED
@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import sys
 from pathlib import Path
-from typing import Optional
 
 import typer
 import ujson
@@ -58,7 +57,7 @@ async def validate_schema(schema: Path, _: str = CONFIG_PARAM) -> None:
 @catch_exception(console=console)
 def validate_graphql(
     query: str,
-    variables:
+    variables: list[str] | None = typer.Argument(
         None, help="Variables to pass along with the query. Format key=value key=value."
     ),
     debug: bool = typer.Option(False, help="Display more troubleshooting information."),
infrahub_sdk/diff.py
CHANGED
@@ -1,11 +1,12 @@
 from __future__ import annotations
 
-from
-
-)
+from datetime import datetime
+from typing import Any
 
 from typing_extensions import NotRequired, TypedDict
 
+from infrahub_sdk.graphql.query import Query
+
 
 class NodeDiff(TypedDict):
     branch: str
@@ -35,6 +36,19 @@ class NodeDiffPeer(TypedDict):
     summary: NodeDiffSummary
 
 
+class DiffTreeData(TypedDict):
+    num_added: int
+    num_updated: int
+    num_removed: int
+    num_conflicts: int
+    to_time: str
+    from_time: str
+    base_branch: str
+    diff_branch: str
+    name: NotRequired[str | None]
+    nodes: list[NodeDiff]
+
+
 def get_diff_summary_query() -> str:
     return """
    query GetDiffTree($branch_name: String!, $name: String, $from_time: DateTime, $to_time: DateTime) {
@@ -125,3 +139,66 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) ->
         display_label=str(node_dict.get("label")),
         elements=element_diffs,
     )
+
+
+def get_diff_tree_query() -> Query:
+    node_structure = {
+        "uuid": None,
+        "kind": None,
+        "status": None,
+        "label": None,
+        "num_added": None,
+        "num_updated": None,
+        "num_removed": None,
+        "attributes": {
+            "name": None,
+            "status": None,
+            "num_added": None,
+            "num_updated": None,
+            "num_removed": None,
+        },
+        "relationships": {
+            "name": None,
+            "status": None,
+            "cardinality": None,
+            "num_added": None,
+            "num_updated": None,
+            "num_removed": None,
+            "elements": {
+                "status": None,
+                "num_added": None,
+                "num_updated": None,
+                "num_removed": None,
+            },
+        },
+    }
+
+    return Query(
+        name="GetDiffTree",
+        query={
+            "DiffTree": {
+                "@filters": {
+                    "branch": "$branch_name",
+                    "name": "$name",
+                    "from_time": "$from_time",
+                    "to_time": "$to_time",
+                },
+                "name": None,
+                "to_time": None,
+                "from_time": None,
+                "base_branch": None,
+                "diff_branch": None,
+                "num_added": None,
+                "num_updated": None,
+                "num_removed": None,
+                "num_conflicts": None,
+                "nodes": node_structure,
+            },
+        },
+        variables={
+            "branch_name": str,
+            "name": str | None,
+            "from_time": datetime | None,
+            "to_time": datetime | None,
+        },
+    )
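
The DiffTreeData TypedDict added above mirrors the top-level fields selected by get_diff_tree_query(). A minimal sketch of typing and reading such a payload; the sample values are invented for illustration:

    from infrahub_sdk.diff import DiffTreeData

    # Invented payload shaped like the DiffTree response; "name" is NotRequired and omitted here.
    diff_tree: DiffTreeData = {
        "num_added": 3,
        "num_updated": 1,
        "num_removed": 0,
        "num_conflicts": 0,
        "from_time": "2025-01-01T00:00:00Z",
        "to_time": "2025-01-02T00:00:00Z",
        "base_branch": "main",
        "diff_branch": "add-devices",
        "nodes": [],
    }

    changed = diff_tree["num_added"] + diff_tree["num_updated"] + diff_tree["num_removed"]
    print(f"{diff_tree['diff_branch']} vs {diff_tree['base_branch']}: {changed} changed, {diff_tree['num_conflicts']} conflicts")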
infrahub_sdk/graphql/constants.py
CHANGED

@@ -1 +1,14 @@
-
+from datetime import datetime
+
+VARIABLE_TYPE_MAPPING = (
+    (str, "String!"),
+    (str | None, "String"),
+    (int, "Int!"),
+    (int | None, "Int"),
+    (float, "Float!"),
+    (float | None, "Float"),
+    (bool, "Boolean!"),
+    (bool | None, "Boolean"),
+    (datetime, "DateTime!"),
+    (datetime | None, "DateTime"),
+)
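
VARIABLE_TYPE_MAPPING pairs Python annotations with GraphQL scalar declarations; the union with None marks a variable as optional (no trailing "!"). A small sketch with a hypothetical lookup helper (graphql_type_for is an illustration, not part of the SDK):

    from datetime import datetime

    from infrahub_sdk.graphql.constants import VARIABLE_TYPE_MAPPING

    def graphql_type_for(annotation: object) -> str:
        # Scan the (python_type, graphql_type) pairs; PEP 604 unions such as
        # str | None compare equal to each other, so optional variables resolve too.
        for python_type, graphql_type in VARIABLE_TYPE_MAPPING:
            if annotation == python_type:
                return graphql_type
        raise ValueError(f"unsupported variable type: {annotation!r}")

    print(graphql_type_for(str))              # String!
    print(graphql_type_for(datetime | None))  # DateTime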
infrahub_sdk/graphql/renderers.py
CHANGED

@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import json
+from datetime import datetime
 from enum import Enum
 from typing import Any
 
@@ -66,7 +67,10 @@ def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str:
     return str(value)
 
 
-
+GRAPHQL_VARIABLE_TYPES = type[str | int | float | bool | datetime | None]
+
+
+def render_variables_to_string(data: dict[str, GRAPHQL_VARIABLE_TYPES]) -> str:
     """Render a dict into a variable string that will be used in a GraphQL Query.
 
     The $ sign will be automatically added to the name of the query.
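
With datetime added to GRAPHQL_VARIABLE_TYPES, render_variables_to_string can declare DateTime variables such as those used by get_diff_tree_query(). A usage sketch; the exact output formatting is an assumption based on the docstring, which only states that the $ prefix is added automatically:

    from datetime import datetime

    from infrahub_sdk.graphql.renderers import render_variables_to_string

    variables = {"branch_name": str, "from_time": datetime | None}
    print(render_variables_to_string(variables))
    # expected to be along the lines of: $branch_name: String!, $from_time: DateTime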
infrahub_sdk/node/attribute.py
CHANGED
@@ -1,7 +1,8 @@
 from __future__ import annotations
 
 import ipaddress
-from
+from collections.abc import Callable
+from typing import TYPE_CHECKING, Any, get_args
 
 from ..protocols_base import CoreNodeBase
 from ..uuidt import UUIDT
@@ -25,7 +26,7 @@ class Attribute:
         self.name = name
         self._schema = schema
 
-        if not isinstance(data, dict) or "value" not in data
+        if not isinstance(data, dict) or "value" not in data:
             data = {"value": data}
 
         self._properties_flag = PROPERTIES_FLAG
@@ -34,12 +35,12 @@ class Attribute:
 
         self._read_only = ["updated_at", "is_inherited"]
 
-        self.id: str | None = data.get("id"
+        self.id: str | None = data.get("id")
 
-        self._value: Any | None = data.get("value"
+        self._value: Any | None = data.get("value")
         self.value_has_been_mutated = False
-        self.is_default: bool | None = data.get("is_default"
-        self.is_from_profile: bool | None = data.get("is_from_profile"
+        self.is_default: bool | None = data.get("is_default")
+        self.is_from_profile: bool | None = data.get("is_from_profile")
 
         if self._value:
             value_mapper: dict[str, Callable] = {
@@ -49,11 +50,10 @@ class Attribute:
             mapper = value_mapper.get(schema.kind, lambda value: value)
             self._value = mapper(data.get("value"))
 
-        self.is_inherited: bool | None = data.get("is_inherited"
-        self.updated_at: str | None = data.get("updated_at"
+        self.is_inherited: bool | None = data.get("is_inherited")
+        self.updated_at: str | None = data.get("updated_at")
 
-        self.
-        self.is_protected: bool | None = data.get("is_protected", None)
+        self.is_protected: bool | None = data.get("is_protected")
 
         self.source: NodeProperty | None = None
         self.owner: NodeProperty | None = None
infrahub_sdk/node/constants.py
CHANGED
@@ -1,12 +1,11 @@
 import ipaddress
 import re
-from typing import Union
 
-PROPERTIES_FLAG = ["
+PROPERTIES_FLAG = ["is_protected", "updated_at"]
 PROPERTIES_OBJECT = ["source", "owner"]
 SAFE_VALUE = re.compile(r"(^[\. /:a-zA-Z0-9_-]+$)|(^$)")
 
-IP_TYPES =
+IP_TYPES = ipaddress.IPv4Interface | ipaddress.IPv6Interface | ipaddress.IPv4Network | ipaddress.IPv6Network
 
 ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = (
     "calling artifact_fetch is only supported for nodes that are Artifact Definition target"
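
IP_TYPES is now a PEP 604 union of the ipaddress interface and network classes (the removed definition is truncated in this rendering, but the dropped typing.Union import suggests it was a Union alias). On Python 3.10+ such a union also works directly with isinstance, as in this small sketch:

    import ipaddress

    from infrahub_sdk.node.constants import IP_TYPES

    # A PEP 604 union is accepted as the classinfo argument of isinstance() on 3.10+.
    value = ipaddress.ip_interface("192.0.2.1/24")
    print(isinstance(value, IP_TYPES))  # True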
infrahub_sdk/node/node.py
CHANGED
@@ -284,12 +284,12 @@ class InfrahubNodeBase:
     def _strip_unmodified_dict(data: dict, original_data: dict, variables: dict, item: str) -> None:
         data_item = data.get(item)
         if item in original_data and isinstance(original_data[item], dict) and isinstance(data_item, dict):
-            for item_key in original_data[item]
+            for item_key in original_data[item]:
                 for property_name in PROPERTIES_OBJECT:
                     if item_key == property_name and isinstance(original_data[item][property_name], dict):
                         if original_data[item][property_name].get("id"):
                             original_data[item][property_name] = original_data[item][property_name]["id"]
-                if item_key in data[item]
+                if item_key in data[item]:
                     if item_key == "id" and len(data[item].keys()) > 1:
                         # Related nodes typically require an ID. So the ID is only
                         # removed if it's the last key in the current context
@@ -335,8 +335,8 @@ class InfrahubNodeBase:
             elif isinstance(relationship_property, RelationshipManagerBase) and not relationship_property.has_update:
                 data.pop(relationship)
 
-        for item in original_data
-            if item in data
+        for item in original_data:
+            if item in data:
                 if data[item] == original_data[item]:
                     if attr := getattr(self, item, None):  # this should never be None, just a safety default value
                         if not isinstance(attr, Attribute) or not attr.value_has_been_mutated:
@@ -741,7 +741,7 @@ class InfrahubNode(InfrahubNodeBase):
 
             if (
                 rel_schema.cardinality == RelationshipCardinality.MANY  # type: ignore[union-attr]
-                and rel_schema.kind not in
+                and rel_schema.kind not in {RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT}  # type: ignore[union-attr]
                 and not (include and rel_name in include)
             ):
                 continue
@@ -895,6 +895,7 @@ class InfrahubNode(InfrahubNodeBase):
         branch: str,
         related_nodes: list[InfrahubNode],
         timeout: int | None = None,
+        recursive: bool = False,
     ) -> None:
         """Processes the Relationships of a InfrahubNode and add Related Nodes to a list.
 
@@ -903,6 +904,7 @@ class InfrahubNode(InfrahubNodeBase):
             branch (str): The branch name.
             related_nodes (list[InfrahubNode]): The list to which related nodes will be appended.
             timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            recursive:(bool): Whether to recursively process relationships of related nodes.
         """
         for rel_name in self._relationships:
             rel = getattr(self, rel_name)
@@ -910,17 +912,37 @@ class InfrahubNode(InfrahubNodeBase):
                 relation = node_data["node"].get(rel_name, None)
                 if relation and relation.get("node", None):
                     related_node = await InfrahubNode.from_graphql(
-                        client=self._client,
+                        client=self._client,
+                        branch=branch,
+                        data=relation,
+                        timeout=timeout,
                     )
                     related_nodes.append(related_node)
+                    if recursive:
+                        await related_node._process_relationships(
+                            node_data=relation,
+                            branch=branch,
+                            related_nodes=related_nodes,
+                            recursive=recursive,
+                        )
             elif rel and isinstance(rel, RelationshipManager):
                 peers = node_data["node"].get(rel_name, None)
                 if peers and peers["edges"]:
                     for peer in peers["edges"]:
                         related_node = await InfrahubNode.from_graphql(
-                            client=self._client,
+                            client=self._client,
+                            branch=branch,
+                            data=peer,
+                            timeout=timeout,
                         )
                         related_nodes.append(related_node)
+                        if recursive:
+                            await related_node._process_relationships(
+                                node_data=peer,
+                                branch=branch,
+                                related_nodes=related_nodes,
+                                recursive=recursive,
+                            )
 
     async def get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]:
         """Fetch all nodes that were allocated for the pool and a given resource.
@@ -1364,7 +1386,7 @@ class InfrahubNodeSync(InfrahubNodeBase):
 
             if (
                 rel_schema.cardinality == RelationshipCardinality.MANY  # type: ignore[union-attr]
-                and rel_schema.kind not in
+                and rel_schema.kind not in {RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT}  # type: ignore[union-attr]
                 and not (include and rel_name in include)
             ):
                 continue
@@ -1520,6 +1542,7 @@ class InfrahubNodeSync(InfrahubNodeBase):
         branch: str,
         related_nodes: list[InfrahubNodeSync],
         timeout: int | None = None,
+        recursive: bool = False,
     ) -> None:
         """Processes the Relationships of a InfrahubNodeSync and add Related Nodes to a list.
 
@@ -1528,7 +1551,7 @@ class InfrahubNodeSync(InfrahubNodeBase):
             branch (str): The branch name.
             related_nodes (list[InfrahubNodeSync]): The list to which related nodes will be appended.
             timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
-
+            recursive:(bool): Whether to recursively process relationships of related nodes.
         """
         for rel_name in self._relationships:
             rel = getattr(self, rel_name)
@@ -1536,17 +1559,37 @@ class InfrahubNodeSync(InfrahubNodeBase):
                 relation = node_data["node"].get(rel_name, None)
                 if relation and relation.get("node", None):
                     related_node = InfrahubNodeSync.from_graphql(
-                        client=self._client,
+                        client=self._client,
+                        branch=branch,
+                        data=relation,
+                        timeout=timeout,
                     )
                     related_nodes.append(related_node)
+                    if recursive:
+                        related_node._process_relationships(
+                            node_data=relation,
+                            branch=branch,
+                            related_nodes=related_nodes,
+                            recursive=recursive,
+                        )
             elif rel and isinstance(rel, RelationshipManagerSync):
                 peers = node_data["node"].get(rel_name, None)
                 if peers and peers["edges"]:
                     for peer in peers["edges"]:
                         related_node = InfrahubNodeSync.from_graphql(
-                            client=self._client,
+                            client=self._client,
+                            branch=branch,
+                            data=peer,
+                            timeout=timeout,
                         )
                         related_nodes.append(related_node)
+                        if recursive:
+                            related_node._process_relationships(
+                                node_data=peer,
+                                branch=branch,
+                                related_nodes=related_nodes,
+                                recursive=recursive,
+                            )
 
     def get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync]:
         """Fetch all nodes that were allocated for the pool and a given resource.
infrahub_sdk/node/related_node.py
CHANGED

@@ -64,7 +64,7 @@ class RelatedNodeBase:
         self._display_label = node_data.get("display_label", None)
         self._typename = node_data.get("__typename", None)
 
-        self.updated_at: str | None = data.get("updated_at",
+        self.updated_at: str | None = data.get("updated_at", properties_data.get("updated_at", None))
 
         # FIXME, we won't need that once we are only supporting paginated results
         if self._typename and self._typename.startswith("Related"):
@@ -171,7 +171,6 @@
         for prop_name in PROPERTIES_OBJECT:
             properties[prop_name] = {"id": None, "display_label": None, "__typename": None}
 
-        if properties:
         data["properties"] = properties
         if peer_data:
             data["node"].update(peer_data)
infrahub_sdk/node/relationship.py
CHANGED

@@ -87,9 +87,8 @@ class RelationshipManagerBase:
             properties[prop_name] = None
         for prop_name in PROPERTIES_OBJECT:
             properties[prop_name] = {"id": None, "display_label": None, "__typename": None}
-
-        if properties:
         data["edges"]["properties"] = properties
+
         if peer_data:
             data["edges"]["node"].update(peer_data)
infrahub_sdk/object_store.py
CHANGED
@@ -33,7 +33,7 @@ class ObjectStore(ObjectStoreBase):
             self.client.log.error(f"Unable to connect to {self.client.address} .. ")
             raise
         except httpx.HTTPStatusError as exc:
-            if exc.response.status_code in
+            if exc.response.status_code in {401, 403}:
                 response = exc.response.json()
                 errors = response.get("errors")
                 messages = [error.get("message") for error in errors]
@@ -54,7 +54,7 @@ class ObjectStore(ObjectStoreBase):
             self.client.log.error(f"Unable to connect to {self.client.address} .. ")
             raise
         except httpx.HTTPStatusError as exc:
-            if exc.response.status_code in
+            if exc.response.status_code in {401, 403}:
                 response = exc.response.json()
                 errors = response.get("errors")
                 messages = [error.get("message") for error in errors]
@@ -81,7 +81,7 @@ class ObjectStoreSync(ObjectStoreBase):
             self.client.log.error(f"Unable to connect to {self.client.address} .. ")
             raise
         except httpx.HTTPStatusError as exc:
-            if exc.response.status_code in
+            if exc.response.status_code in {401, 403}:
                 response = exc.response.json()
                 errors = response.get("errors")
                 messages = [error.get("message") for error in errors]
@@ -102,7 +102,7 @@ class ObjectStoreSync(ObjectStoreBase):
             self.client.log.error(f"Unable to connect to {self.client.address} .. ")
             raise
         except httpx.HTTPStatusError as exc:
-            if exc.response.status_code in
+            if exc.response.status_code in {401, 403}:
                 response = exc.response.json()
                 errors = response.get("errors")
                 messages = [error.get("message") for error in errors]
infrahub_sdk/operation.py
CHANGED
@@ -65,14 +65,14 @@ class InfrahubOperation:
         await self._init_client.schema.all(branch=self.branch_name)
 
         for kind in data:
-            if kind in self._init_client.schema.cache[self.branch_name].nodes
+            if kind in self._init_client.schema.cache[self.branch_name].nodes:
                 for result in data[kind].get("edges", []):
                     node = await self.infrahub_node.from_graphql(
                         client=self._init_client, branch=self.branch_name, data=result
                     )
                     self._nodes.append(node)
                     await node._process_relationships(
-                        node_data=result, branch=self.branch_name, related_nodes=self._related_nodes
+                        node_data=result, branch=self.branch_name, related_nodes=self._related_nodes, recursive=True
                     )
 
         for node in self._nodes + self._related_nodes:
infrahub_sdk/protocols_base.py
CHANGED
infrahub_sdk/protocols_generator/generator.py
CHANGED

@@ -56,7 +56,7 @@ class CodeGenerator:
             if not e.startswith("__")
             and not e.endswith("__")
             and e
-            not in
+            not in {"TYPE_CHECKING", "CoreNode", "Optional", "Protocol", "Union", "annotations", "runtime_checkable"}
         ]
 
         self.sorted_generics = self._sort_and_filter_models(self.generics, filters=["CoreNode"] + self.base_protocols)

infrahub_sdk/pytest_plugin/items/jinja2_transform.py
CHANGED

@@ -83,7 +83,7 @@ class InfrahubJinja2Item(InfrahubItem):
 class InfrahubJinja2TransformSmokeItem(InfrahubJinja2Item):
     def runtest(self) -> None:
         file_path: Path = self.session.infrahub_config_path.parent / self.resource_config.template_path  # type: ignore[attr-defined]
-        self.get_jinja2_environment().parse(file_path.read_text(), filename=file_path.name)
+        self.get_jinja2_environment().parse(file_path.read_text(encoding="utf-8"), filename=file_path.name)
 
 
 class InfrahubJinja2TransformUnitRenderItem(InfrahubJinja2Item):

infrahub_sdk/pytest_plugin/plugin.py
CHANGED

@@ -90,7 +90,7 @@ def pytest_sessionstart(session: Session) -> None:
 
 
 def pytest_collect_file(parent: Collector | Item, file_path: Path) -> InfrahubYamlFile | None:
-    if file_path.suffix in
+    if file_path.suffix in {".yml", ".yaml"} and file_path.name.startswith("test_"):
         return InfrahubYamlFile.from_parent(parent, path=file_path)
     return None
infrahub_sdk/query_groups.py
CHANGED
@@ -168,7 +168,7 @@ class InfrahubGroupContext(InfrahubGroupContextBase):
             return
 
         # Calculate how many nodes should be deleted
-        self.unused_member_ids = set(existing_group.members.peer_ids) - set(members)  # type: ignore
+        self.unused_member_ids = list(set(existing_group.members.peer_ids) - set(members))  # type: ignore[union-attr]
 
         if not self.delete_unused_nodes:
             return
@@ -262,7 +262,7 @@ class InfrahubGroupContextSync(InfrahubGroupContextBase):
             return
 
         # Calculate how many nodes should be deleted
-        self.unused_member_ids = set(existing_group.members.peer_ids) - set(members)  # type: ignore
+        self.unused_member_ids = list(set(existing_group.members.peer_ids) - set(members))  # type: ignore[union-attr]
 
         if not self.delete_unused_nodes:
             return
infrahub_sdk/schema/__init__.py
CHANGED
@@ -7,12 +7,11 @@ import warnings
 from collections.abc import MutableMapping
 from enum import Enum
 from time import sleep
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, TypeAlias, TypedDict
 from urllib.parse import urlencode
 
 import httpx
 from pydantic import BaseModel, Field
-from typing_extensions import TypeAlias
 
 from ..exceptions import (
     BranchNotFoundError,
@@ -46,7 +45,7 @@ if TYPE_CHECKING:
     from ..client import InfrahubClient, InfrahubClientSync, SchemaType, SchemaTypeSync
     from ..node import InfrahubNode, InfrahubNodeSync
 
-    InfrahubNodeTypes =
+    InfrahubNodeTypes: TypeAlias = InfrahubNode | InfrahubNodeSync
 
 
 __all__ = [
@@ -84,11 +83,11 @@ class EnumMutation(str, Enum):
     remove = "SchemaEnumRemove"
 
 
-MainSchemaTypes: TypeAlias =
-MainSchemaTypesAPI: TypeAlias =
-MainSchemaTypesAll: TypeAlias =
-    NodeSchema
-
+MainSchemaTypes: TypeAlias = NodeSchema | GenericSchema
+MainSchemaTypesAPI: TypeAlias = NodeSchemaAPI | GenericSchemaAPI | ProfileSchemaAPI | TemplateSchemaAPI
+MainSchemaTypesAll: TypeAlias = (
+    NodeSchema | GenericSchema | NodeSchemaAPI | GenericSchemaAPI | ProfileSchemaAPI | TemplateSchemaAPI
+)
 
 
 class SchemaWarningType(Enum):
@@ -123,7 +122,7 @@ class InfrahubSchemaBase:
         SchemaRoot(**data)
 
     def validate_data_against_schema(self, schema: MainSchemaTypesAPI, data: dict) -> None:
-        for key in data
+        for key in data:
            if key not in schema.relationship_names + schema.attribute_names:
                identifier = f"{schema.kind}"
                raise ValidationError(
@@ -155,7 +154,6 @@ class InfrahubSchemaBase:
         source: str | None = None,
         owner: str | None = None,
         is_protected: bool | None = None,
-        is_visible: bool | None = None,
     ) -> dict[str, Any]:
         obj_data: dict[str, Any] = {}
         item_metadata: dict[str, Any] = {}
@@ -165,8 +163,6 @@
             item_metadata["owner"] = str(owner)
         if is_protected is not None:
             item_metadata["is_protected"] = is_protected
-        if is_visible is not None:
-            item_metadata["is_visible"] = is_visible
 
         for key, value in data.items():
             obj_data[key] = {}
@@ -194,12 +190,12 @@
                 hash=status["hash"], previous_hash=status["previous_hash"], warnings=status.get("warnings") or []
             )
 
-        if response.status_code in
+        if response.status_code in {
             httpx.codes.BAD_REQUEST,
             httpx.codes.UNPROCESSABLE_ENTITY,
             httpx.codes.UNAUTHORIZED,
             httpx.codes.FORBIDDEN,
-
+        }:
             return SchemaLoadResponse(errors=response.json())
 
         response.raise_for_status()
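
MainSchemaTypes, MainSchemaTypesAPI and MainSchemaTypesAll are now PEP 604 unions declared with the stdlib typing.TypeAlias instead of typing_extensions. A minimal sketch of annotating a helper with one of the aliases; summarize_schema is an illustration, and the kind / attribute_names / relationship_names fields are the ones used by validate_data_against_schema above:

    from infrahub_sdk.schema import MainSchemaTypesAPI

    def summarize_schema(schema: MainSchemaTypesAPI) -> str:
        # The alias is just a union of the API schema models, so any of them is accepted.
        return (
            f"{schema.kind}: {len(schema.attribute_names)} attributes, "
            f"{len(schema.relationship_names)} relationships"
        )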