infrahub-server 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +27 -3
- infrahub/auth.py +5 -5
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +160 -157
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +56 -9
- infrahub/computed_attribute/tasks.py +19 -7
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +35 -24
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +9 -5
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +72 -10
- infrahub/core/changelog/models.py +2 -10
- infrahub/core/constants/__init__.py +4 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/calculator.py +2 -2
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/delete_query.py +9 -5
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/diff/query/merge.py +39 -23
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +7 -4
- infrahub/core/manager.py +3 -81
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +13 -10
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +26 -5
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +66 -19
- infrahub/core/models.py +2 -2
- infrahub/core/node/__init__.py +207 -54
- infrahub/core/node/create.py +53 -49
- infrahub/core/node/lock_utils.py +124 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +82 -15
- infrahub/core/query/diff.py +61 -16
- infrahub/core/query/ipam.py +16 -4
- infrahub/core/query/node.py +92 -212
- infrahub/core/query/relationship.py +44 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/relationship/model.py +69 -24
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/check.py +1 -1
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/core/transform.py +1 -1
- infrahub/core/schema/definitions/internal.py +12 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/node_schema.py +1 -0
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +295 -10
- infrahub/core/schema/schema_branch_display.py +135 -0
- infrahub/core/schema/schema_branch_hfid.py +120 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +42 -2
- infrahub/git/integrator.py +22 -14
- infrahub/git/tasks.py +52 -2
- infrahub/graphql/analyzer.py +9 -0
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +16 -6
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +213 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +16 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +44 -13
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +73 -41
- infrahub/graphql/mutations/main.py +61 -178
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +8 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +119 -42
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/message_bus/types.py +1 -0
- infrahub/patch/plan_writer.py +2 -2
- infrahub/permissions/constants.py +2 -0
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +98 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +67 -14
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +9 -1
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workers/dependencies.py +3 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +118 -3
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/models.py +17 -2
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/branch.py +17 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +376 -95
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/branch.py +3 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +20 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +40 -10
- infrahub_sdk/ctl/task.py +110 -0
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/node/relationship.py +1 -3
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +54 -6
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/task/models.py +6 -4
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +233 -176
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub_sdk/config.py
CHANGED
@@ -1,9 +1,10 @@
 from __future__ import annotations
 
+import ssl
 from copy import deepcopy
 from typing import Any
 
-from pydantic import Field, field_validator, model_validator
+from pydantic import Field, PrivateAttr, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self
 
@@ -78,6 +79,7 @@ class ConfigBase(BaseSettings):
        Can be useful to test with self-signed certificates.""",
    )
    tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
+   _ssl_context: ssl.SSLContext | None = PrivateAttr(default=None)
 
    @model_validator(mode="before")
    @classmethod
@@ -133,6 +135,28 @@ class ConfigBase(BaseSettings):
    def password_authentication(self) -> bool:
        return bool(self.username)
 
+   @property
+   def tls_context(self) -> ssl.SSLContext:
+       if self._ssl_context:
+           return self._ssl_context
+
+       if self.tls_insecure:
+           self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+           self._ssl_context.check_hostname = False
+           self._ssl_context.verify_mode = ssl.CERT_NONE
+           return self._ssl_context
+
+       if self.tls_ca_file:
+           self._ssl_context = ssl.create_default_context(cafile=self.tls_ca_file)
+
+       if self._ssl_context is None:
+           self._ssl_context = ssl.create_default_context()
+
+       return self._ssl_context
+
+   def set_ssl_context(self, context: ssl.SSLContext) -> None:
+       self._ssl_context = context
+
 
 class Config(ConfigBase):
    recorder: RecorderType = Field(default=RecorderType.NONE, description="Select builtin recorder for later replay.")
@@ -174,4 +198,7 @@ class Config(ConfigBase):
            if field not in covered_keys:
                config[field] = deepcopy(getattr(self, field))
 
-
+       new_config = Config(**config)
+       if self._ssl_context:
+           new_config.set_ssl_context(self._ssl_context)
+       return new_config
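The new `tls_context` property lazily builds and caches an `ssl.SSLContext` from `tls_insecure` / `tls_ca_file`, `set_ssl_context()` lets callers inject their own context, and cloned configs now carry the injected context along. A minimal sketch of how the options might be combined; the `address` field, the `InfrahubClient(config=...)` constructor, and the file paths are illustrative assumptions rather than part of this diff:

    import ssl

    from infrahub_sdk import Config, InfrahubClient

    # Trust a private CA bundle; tls_context will build the SSLContext on first use.
    config = Config(address="https://infrahub.example.com", tls_ca_file="/etc/ssl/certs/internal-ca.pem")

    # Or inject a fully customised context (e.g. mutual TLS) instead of letting the SDK build one.
    custom_ctx = ssl.create_default_context(cafile="/etc/ssl/certs/internal-ca.pem")
    custom_ctx.load_cert_chain(certfile="client.crt", keyfile="client.key")
    config.set_ssl_context(custom_ctx)

    client = InfrahubClient(config=config)  # the client can now reuse config.tls_context for HTTPS calls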
infrahub_sdk/convert_object_type.py
ADDED
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel, model_validator
+
+CONVERT_OBJECT_MUTATION = """
+mutation($node_id: String!, $target_kind: String!, $fields_mapping: GenericScalar!) {
+    ConvertObjectType(data: {
+        node_id: $node_id,
+        target_kind: $target_kind,
+        fields_mapping: $fields_mapping
+    }) {
+        ok
+        node
+    }
+}
+"""
+
+
+class ConversionFieldValue(BaseModel):  # Only one of these fields can be not None
+    """
+    Holds the new value of the destination field during an object conversion.
+    Use `attribute_value` to specify the new raw value of an attribute.
+    Use `peer_id` to specify new peer of a cardinality one relationship.
+    Use `peers_ids` to specify new peers of a cardinality many relationship.
+    Only one of `attribute_value`, `peer_id` and `peers_ids` can be specified.
+    """
+
+    attribute_value: Any | None = None
+    peer_id: str | None = None
+    peers_ids: list[str] | None = None
+
+    @model_validator(mode="after")
+    def check_only_one_field(self) -> ConversionFieldValue:
+        fields = [self.attribute_value, self.peer_id, self.peers_ids]
+        set_fields = [f for f in fields if f is not None]
+        if len(set_fields) != 1:
+            raise ValueError("Exactly one of `attribute_value`, `peer_id`, or `peers_ids` must be set")
+        return self
+
+
+class ConversionFieldInput(BaseModel):
+    """
+    Indicates how to fill in the value of the destination field during an object conversion.
+    Use `source_field` to reuse the value of the corresponding field of the object being converted.
+    Use `data` to specify the new value for the field.
+    Use `use_default_value` to set the destination field to its schema default.
+    Only one of `source_field`, `data`, or `use_default_value` can be specified.
+    """
+
+    source_field: str | None = None
+    data: ConversionFieldValue | None = None
+    use_default_value: bool = False
+
+    @model_validator(mode="after")
+    def check_only_one_field(self) -> ConversionFieldInput:
+        fields_set = [self.source_field is not None, self.data is not None, self.use_default_value is True]
+        if sum(fields_set) != 1:
+            raise ValueError("Exactly one of `source_field`, `data` or `use_default_value` must be set")
+        return self
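`ConversionFieldInput` describes, per destination field, whether to copy the matching source field, supply an explicit `ConversionFieldValue`, or fall back to the schema default, and the validators enforce that exactly one option is chosen. A rough sketch of building a `fields_mapping` and sending it through `CONVERT_OBJECT_MUTATION`; the target kind, the field names, and the `execute_graphql` call are illustrative assumptions, not part of this diff:

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.convert_object_type import (
        CONVERT_OBJECT_MUTATION,
        ConversionFieldInput,
        ConversionFieldValue,
    )

    # Hypothetical mapping: reuse "name", hard-code "role", reset "description" to its default.
    fields_mapping = {
        "name": ConversionFieldInput(source_field="name"),
        "role": ConversionFieldInput(data=ConversionFieldValue(attribute_value="edge")),
        "description": ConversionFieldInput(use_default_value=True),
    }

    async def convert(client: InfrahubClient, node_id: str) -> dict:
        return await client.execute_graphql(
            query=CONVERT_OBJECT_MUTATION,
            variables={
                "node_id": node_id,
                "target_kind": "InfraEdgeDevice",  # hypothetical destination kind
                "fields_mapping": {key: value.model_dump(exclude_none=True) for key, value in fields_mapping.items()},
            },
        )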
infrahub_sdk/ctl/branch.py
CHANGED
@@ -46,6 +46,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
    table.add_column("Sync with Git")
    table.add_column("Has Schema Changes")
    table.add_column("Is Default")
+   table.add_column("Status")
 
    # identify the default branch and always print it first
    default_branch = [branch for branch in branches.values() if branch.is_default][0]
@@ -57,6 +58,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
        "[green]True" if default_branch.sync_with_git else "[#FF7F50]False",
        "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
        "[green]True" if default_branch.is_default else "[#FF7F50]False",
+       default_branch.status,
    )
 
    for branch in branches.values():
@@ -71,6 +73,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
            "[green]True" if branch.sync_with_git else "[#FF7F50]False",
            "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
            "[green]True" if branch.is_default else "[#FF7F50]False",
+           branch.status,
        )
 
    console.print(table)
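`infrahubctl branch list` now renders a Status column for every branch. The same field is available on the branch objects returned by the SDK; a small sketch, assuming the usual `InfrahubClient().branch.all()` API:

    from infrahub_sdk import InfrahubClient

    async def print_branch_statuses() -> None:
        client = InfrahubClient()
        branches = await client.branch.all()  # mapping of branch name -> branch data
        for name, branch in branches.items():
            print(f"{name}: status={branch.status}, default={branch.is_default}")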
infrahub_sdk/ctl/check.py
CHANGED
@@ -11,10 +11,9 @@ import typer
 from rich.console import Console
 from rich.logging import RichHandler
 
-from ..ctl import config
 from ..ctl.client import initialize_client
 from ..ctl.exceptions import QueryNotFoundError
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.utils import catch_exception, execute_graphql_query
 from ..exceptions import ModuleImportError
 
@@ -59,7 +58,7 @@ def run(
    FORMAT = "%(message)s"
    logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
 
-   repository_config = get_repository_config(
+   repository_config = get_repository_config(find_repository_config_file())
 
    if list_available:
        list_checks(repository_config=repository_config)
infrahub_sdk/ctl/cli_commands.py
CHANGED
@@ -20,18 +20,19 @@ from rich.table import Table
 
 from .. import __version__ as sdk_version
 from ..async_typer import AsyncTyper
-from ..ctl import config
 from ..ctl.branch import app as branch_app
 from ..ctl.check import run as run_check
 from ..ctl.client import initialize_client, initialize_client_sync
 from ..ctl.exceptions import QueryNotFoundError
 from ..ctl.generator import run as run_generator
+from ..ctl.graphql import app as graphql_app
 from ..ctl.menu import app as menu_app
 from ..ctl.object import app as object_app
 from ..ctl.render import list_jinja2_transforms, print_template_errors
 from ..ctl.repository import app as repository_app
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.schema import app as schema_app
+from ..ctl.task import app as task_app
 from ..ctl.transform import list_transforms
 from ..ctl.utils import (
     catch_exception,
@@ -46,7 +47,7 @@ from ..protocols_generator.generator import CodeGenerator
 from ..schema import MainSchemaTypesAll, SchemaRoot
 from ..template import Jinja2Template
 from ..template.exceptions import JinjaTemplateError
-from ..utils import
+from ..utils import write_to_file
 from ..yaml import SchemaFile
 from .exporter import dump
 from .importer import load
@@ -63,6 +64,8 @@ app.add_typer(validate_app, name="validate")
 app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
 app.add_typer(object_app, name="object")
+app.add_typer(graphql_app, name="graphql")
+app.add_typer(task_app, name="task")
 
 app.command(name="dump")(dump)
 app.command(name="load")(load)
@@ -208,7 +211,6 @@ async def _run_transform(
        debug: Prints debug info to the command line
        repository_config: Repository config object. This is used to load the graphql query from the repository.
    """
-   branch = get_branch(branch)
 
    try:
        response = execute_graphql_query(
@@ -260,7 +262,7 @@ async def render(
    """Render a local Jinja2 Transform for debugging purpose."""
 
    variables_dict = parse_cli_vars(variables)
-   repository_config = get_repository_config(
+   repository_config = get_repository_config(find_repository_config_file())
 
    if list_available or not transform_name:
        list_jinja2_transforms(config=repository_config)
@@ -270,7 +272,7 @@ async def render(
    try:
        transform_config = repository_config.get_jinja2_transform(name=transform_name)
    except KeyError as exc:
-       console.print(f'[red]Unable to find "{transform_name}" in
+       console.print(f'[red]Unable to find "{transform_name}" in repository config file')
        list_jinja2_transforms(config=repository_config)
        raise typer.Exit(1) from exc
 
@@ -310,7 +312,7 @@ def transform(
    """Render a local transform (TransformPython) for debugging purpose."""
 
    variables_dict = parse_cli_vars(variables)
-   repository_config = get_repository_config(
+   repository_config = get_repository_config(find_repository_config_file())
 
    if list_available or not transform_name:
        list_transforms(config=repository_config)
@@ -409,7 +411,6 @@ def info( # noqa: PLR0915
    _: str = CONFIG_PARAM,
 ) -> None:
    """Display the status of the Python SDK."""
-
    info: dict[str, Any] = {
        "error": None,
        "status": ":x:",
@@ -417,12 +418,17 @@ def info( # noqa: PLR0915
        "user_info": {},
        "groups": {},
    }
+   client = initialize_client_sync()
+   fetch_user_details = bool(client.config.username) or bool(client.config.api_token)
+
    try:
-       client = initialize_client_sync()
        info["infrahub_version"] = client.get_version()
-
+
+       if fetch_user_details:
+           info["user_info"] = client.get_user()
+           info["groups"] = client.get_user_permissions()
+
        info["status"] = ":white_heavy_check_mark:"
-       info["groups"] = client.get_user_permissions()
    except Exception as e:
        info["error"] = f"{e!s} ({e.__class__.__name__})"
 
@@ -469,7 +475,7 @@ def info( # noqa: PLR0915
    pretty_model = Pretty(client.config.model_dump(), expand_all=True)
    layout["client_info"].update(Panel(pretty_model, title="Client Info"))
 
-   # Infrahub information
+   # Infrahub information panel
    infrahub_info = Table(show_header=False, box=None)
    if info["user_info"]:
        infrahub_info.add_row("User:", info["user_info"]["AccountProfile"]["display_label"])
@@ -487,6 +493,8 @@ def info( # noqa: PLR0915
        infrahub_info.add_row("Groups:", "")
        for group, roles in groups.items():
            infrahub_info.add_row("", group, ", ".join(roles))
+   else:
+       infrahub_info.add_row("User:", "anonymous")
 
    layout["infrahub_info"].update(Panel(infrahub_info, title="Infrahub Info"))
 
infrahub_sdk/ctl/config.py
CHANGED
@@ -2,16 +2,22 @@
 
 from __future__ import annotations
 
+import sys
 from pathlib import Path
 
-import toml
 import typer
 from pydantic import Field, ValidationError, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
 DEFAULT_CONFIG_FILE = "infrahubctl.toml"
 ENVVAR_CONFIG_FILE = "INFRAHUBCTL_CONFIG"
 INFRAHUB_REPO_CONFIG_FILE = ".infrahub.yml"
+INFRAHUB_REPO_CONFIG_FILE_ALT = ".infrahub.yaml"
 
 
 class Settings(BaseSettings):
@@ -59,7 +65,7 @@ class ConfiguredSettings:
 
        if config_file.is_file():
            config_string = config_file.read_text(encoding="utf-8")
-           config_tmp =
+           config_tmp = tomllib.loads(config_string)
 
            self._settings = Settings(**config_tmp)
            return
infrahub_sdk/ctl/generator.py
CHANGED
@@ -6,9 +6,8 @@ from typing import TYPE_CHECKING, Optional
 import typer
 from rich.console import Console
 
-from ..ctl import config
 from ..ctl.client import initialize_client
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.utils import execute_graphql_query, init_logging, parse_cli_vars
 from ..exceptions import ModuleImportError
 from ..node import InfrahubNode
@@ -26,7 +25,7 @@ async def run(
    variables: Optional[list[str]] = None,
 ) -> None:
    init_logging(debug=debug)
-   repository_config = get_repository_config(
+   repository_config = get_repository_config(find_repository_config_file())
 
    if list_available or not generator_name:
        list_generators(repository_config=repository_config)
@@ -65,6 +64,8 @@ async def run(
            branch=branch or "",
            params=variables_dict,
            convert_query_response=generator_config.convert_query_response,
+           execute_in_proposed_change=generator_config.execute_in_proposed_change,
+           execute_after_merge=generator_config.execute_after_merge,
            infrahub_node=InfrahubNode,
        )
        await generator._init_client.schema.all(branch=generator.branch_name)
@@ -94,6 +95,8 @@ async def run(
            branch=branch or "",
            params=params,
            convert_query_response=generator_config.convert_query_response,
+           execute_in_proposed_change=generator_config.execute_in_proposed_change,
+           execute_after_merge=generator_config.execute_after_merge,
            infrahub_node=InfrahubNode,
        )
        data = execute_graphql_query(
infrahub_sdk/ctl/graphql.py
ADDED
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+import ast
+from collections import defaultdict
+from pathlib import Path
+from typing import Optional
+
+import typer
+from ariadne_codegen.client_generators.package import PackageGenerator, get_package_generator
+from ariadne_codegen.exceptions import ParsingError
+from ariadne_codegen.plugins.explorer import get_plugins_types
+from ariadne_codegen.plugins.manager import PluginManager
+from ariadne_codegen.schema import (
+    filter_fragments_definitions,
+    filter_operations_definitions,
+    get_graphql_schema_from_path,
+)
+from ariadne_codegen.settings import ClientSettings, CommentsStrategy
+from ariadne_codegen.utils import ast_to_str
+from graphql import DefinitionNode, GraphQLSchema, NoUnusedFragmentsRule, parse, specified_rules, validate
+from rich.console import Console
+
+from ..async_typer import AsyncTyper
+from ..ctl.client import initialize_client
+from ..ctl.utils import catch_exception
+from ..graphql.utils import insert_fragments_inline, remove_fragment_import
+from .parameters import CONFIG_PARAM
+
+app = AsyncTyper()
+console = Console()
+
+ARIADNE_PLUGINS = [
+    "infrahub_sdk.graphql.plugin.PydanticBaseModelPlugin",
+    "infrahub_sdk.graphql.plugin.FutureAnnotationPlugin",
+    "infrahub_sdk.graphql.plugin.StandardTypeHintPlugin",
+]
+
+
+def find_gql_files(query_path: Path) -> list[Path]:
+    """
+    Find all files with .gql extension in the specified directory.
+
+    Args:
+        query_path: Path to the directory to search for .gql files
+
+    Returns:
+        List of Path objects for all .gql files found
+    """
+    if not query_path.exists():
+        raise FileNotFoundError(f"File or directory not found: {query_path}")
+
+    if not query_path.is_dir() and query_path.is_file():
+        return [query_path]
+
+    return list(query_path.glob("**/*.gql"))
+
+
+def get_graphql_query(queries_path: Path, schema: GraphQLSchema) -> tuple[DefinitionNode, ...]:
+    """Get GraphQL queries definitions from a single GraphQL file."""
+
+    if not queries_path.exists():
+        raise FileNotFoundError(f"File not found: {queries_path}")
+    if not queries_path.is_file():
+        raise ValueError(f"{queries_path} is not a file")
+
+    queries_str = queries_path.read_text(encoding="utf-8")
+    queries_ast = parse(queries_str)
+    validation_errors = validate(
+        schema=schema,
+        document_ast=queries_ast,
+        rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule],
+    )
+    if validation_errors:
+        raise ValueError("\n\n".join(error.message for error in validation_errors))
+    return queries_ast.definitions
+
+
+def generate_result_types(directory: Path, package: PackageGenerator, fragment: ast.Module) -> None:
+    for file_name, module in package._result_types_files.items():
+        file_path = directory / file_name
+
+        insert_fragments_inline(module, fragment)
+        remove_fragment_import(module)
+
+        code = package._add_comments_to_code(ast_to_str(module), package.queries_source)
+        if package.plugin_manager:
+            code = package.plugin_manager.generate_result_types_code(code)
+        file_path.write_text(code)
+        package._generated_files.append(file_path.name)
+
+
+@app.callback()
+def callback() -> None:
+    """
+    Various GraphQL related commands.
+    """
+
+
+@app.command()
+@catch_exception(console=console)
+async def export_schema(
+    destination: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Export the GraphQL schema to a file."""
+
+    client = initialize_client()
+    schema_text = await client.schema.get_graphql_schema()
+
+    destination.parent.mkdir(parents=True, exist_ok=True)
+    destination.write_text(schema_text)
+    console.print(f"[green]Schema exported to {destination}")
+
+
+@app.command()
+@catch_exception(console=console)
+async def generate_return_types(
+    query: Optional[Path] = typer.Argument(
+        None, help="Location of the GraphQL query file(s). Defaults to current directory if not specified."
+    ),
+    schema: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Create Pydantic Models for GraphQL query return types"""
+
+    query = Path.cwd() if query is None else query
+
+    # Load the GraphQL schema
+    if not schema.exists():
+        raise FileNotFoundError(f"GraphQL Schema file not found: {schema}")
+    graphql_schema = get_graphql_schema_from_path(schema_path=str(schema))
+
+    # Initialize the plugin manager
+    plugin_manager = PluginManager(
+        schema=graphql_schema,
+        plugins_types=get_plugins_types(plugins_strs=ARIADNE_PLUGINS),
+    )
+
+    # Find the GraphQL files and organize them by directory
+    gql_files = find_gql_files(query)
+    gql_per_directory: dict[Path, list[Path]] = defaultdict(list)
+    for gql_file in gql_files:
+        gql_per_directory[gql_file.parent].append(gql_file)
+
+    # Generate the Pydantic Models for the GraphQL queries
+    for directory, gql_files in gql_per_directory.items():
+        for gql_file in gql_files:
+            try:
+                definitions = get_graphql_query(queries_path=gql_file, schema=graphql_schema)
+            except ValueError as exc:
+                console.print(f"[red]Error generating result types for {gql_file}: {exc}")
+                continue
+            queries = filter_operations_definitions(definitions)
+            fragments = filter_fragments_definitions(definitions)
+
+            package_generator = get_package_generator(
+                schema=graphql_schema,
+                fragments=fragments,
+                settings=ClientSettings(
+                    schema_path=str(schema),
+                    target_package_name=directory.name,
+                    queries_path=str(directory),
+                    include_comments=CommentsStrategy.NONE,
+                ),
+                plugin_manager=plugin_manager,
+            )
+
+            parsing_failed = False
+            try:
+                for query_operation in queries:
+                    package_generator.add_operation(query_operation)
+            except ParsingError as exc:
+                console.print(f"[red]Unable to process {gql_file.name}: {exc}")
+                parsing_failed = True
+
+            if parsing_failed:
+                continue
+
+            module_fragment = package_generator.fragments_generator.generate()
+
+            generate_result_types(directory=directory, package=package_generator, fragment=module_fragment)
+
+            for file_name in package_generator._result_types_files.keys():
+                console.print(f"[green]Generated {file_name} in {directory}")
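The new `infrahubctl graphql` commands export the server's GraphQL schema to a file and generate Pydantic result-type modules for local `.gql` queries via ariadne-codegen. A minimal sketch reusing the same building blocks programmatically; the `schema.graphql` export and the `queries/` directory are assumptions for the example:

    from pathlib import Path

    from ariadne_codegen.schema import get_graphql_schema_from_path
    from infrahub_sdk.ctl.graphql import find_gql_files, get_graphql_query

    # Assumes the schema was exported beforehand, e.g. with the export_schema command above.
    graphql_schema = get_graphql_schema_from_path(schema_path="schema.graphql")

    for gql_file in find_gql_files(Path("queries")):
        definitions = get_graphql_query(queries_path=gql_file, schema=graphql_schema)
        print(f"{gql_file}: {len(definitions)} definition(s) validated against the schema")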
infrahub_sdk/ctl/repository.py
CHANGED
@@ -24,11 +24,49 @@ app = AsyncTyper()
 console = Console()
 
 
+def find_repository_config_file(base_path: Path | None = None) -> Path:
+    """Find the repository config file, checking for both .yml and .yaml extensions.
+
+    Args:
+        base_path: Base directory to search in. If None, uses current directory.
+
+    Returns:
+        Path to the config file.
+
+    Raises:
+        FileNotFoundError: If neither .infrahub.yml nor .infrahub.yaml exists.
+    """
+    if base_path is None:
+        base_path = Path()
+
+    yml_path = base_path / ".infrahub.yml"
+    yaml_path = base_path / ".infrahub.yaml"
+
+    # Prefer .yml if both exist
+    if yml_path.exists():
+        return yml_path
+    if yaml_path.exists():
+        return yaml_path
+    # For backward compatibility, return .yml path for error messages
+    return yml_path
+
+
 def get_repository_config(repo_config_file: Path) -> InfrahubRepositoryConfig:
+    # If the file doesn't exist, try to find it with alternate extension
+    if not repo_config_file.exists():
+        if repo_config_file.name == ".infrahub.yml":
+            alt_path = repo_config_file.parent / ".infrahub.yaml"
+            if alt_path.exists():
+                repo_config_file = alt_path
+        elif repo_config_file.name == ".infrahub.yaml":
+            alt_path = repo_config_file.parent / ".infrahub.yml"
+            if alt_path.exists():
+                repo_config_file = alt_path
+
    try:
        config_file_data = load_repository_config_file(repo_config_file)
    except FileNotFoundError as exc:
-       console.print(f"[red]File not found {exc}")
+       console.print(f"[red]File not found {exc} (also checked for .infrahub.yml and .infrahub.yaml)")
        raise typer.Exit(1) from exc
    except FileNotValidError as exc:
        console.print(f"[red]{exc.message}")
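With `find_repository_config_file()`, the CLI accepts either `.infrahub.yml` or `.infrahub.yaml` and prefers `.yml` when both exist, while `get_repository_config()` falls back to the alternate extension on its own. A small sketch of the lookup; the repository path is only an example:

    from pathlib import Path

    from infrahub_sdk.ctl.repository import find_repository_config_file, get_repository_config

    # Resolves to .infrahub.yml if present, otherwise .infrahub.yaml, otherwise returns the
    # .yml path so later error messages still point at a sensible file name.
    config_path = find_repository_config_file(Path("/repos/demo"))
    repository_config = get_repository_config(config_path)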
infrahub_sdk/ctl/schema.py
CHANGED
@@ -14,6 +14,7 @@ from ..async_typer import AsyncTyper
 from ..ctl.client import initialize_client
 from ..ctl.utils import catch_exception, init_logging
 from ..queries import SCHEMA_HASH_SYNC_STATUS
+from ..schema import SchemaWarning
 from ..yaml import SchemaFile
 from .parameters import CONFIG_PARAM
 from .utils import load_yamlfile_from_disk_and_exit
@@ -73,14 +74,29 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
            loc_type = loc_path[-1]
            input_str = error.get("input", None)
            error_message = f"{loc_type} ({input_str}) | {error['msg']} ({error['type']})"
-           console.print(
+           console.print(
+               f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+           )
 
        elif len(loc_path) > 6:
            loc_type = loc_path[5]
-
+           error_data = node[loc_type]
+           attribute = loc_path[6]
+
+           if isinstance(attribute, str):
+               input_label = None
+               for data in error_data:
+                   if data.get(attribute) is not None:
+                       input_label = data.get("name", None)
+                       break
+           else:
+               input_label = error_data[attribute].get("name", None)
+
            input_str = error.get("input", None)
            error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})"
-           console.print(
+           console.print(
+               f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+           )
 
 
 def handle_non_detail_errors(response: dict[str, Any]) -> None:
@@ -137,6 +153,8 @@ async def load(
 
    console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")
 
+   _display_schema_warnings(console=console, warnings=response.warnings)
+
    if response.schema_updated and wait:
        waited = 0
        continue_waiting = True
@@ -172,12 +190,24 @@ async def check(
 
    success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)
 
-   if not success:
+   if not success or not response:
        display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
+       return
+
+   for schema_file in schemas_data:
+       console.print(f"[green] schema '{schema_file.location}' is Valid!")
+
+   warnings = response.pop("warnings", [])
+   schema_warnings = [SchemaWarning.model_validate(warning) for warning in warnings]
+   _display_schema_warnings(console=console, warnings=schema_warnings)
+   if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
+       print("No diff")
    else:
-
-
-
-
-
-
+       print(yaml.safe_dump(data=response, indent=4))
+
+
+def _display_schema_warnings(console: Console, warnings: list[SchemaWarning]) -> None:
+   for warning in warnings:
+       console.print(
+           f"[yellow] {warning.type.value}: {warning.message} [{', '.join([kind.display for kind in warning.kinds])}]"
+       )