infrahub-server 1.4.10__py3-none-any.whl → 1.5.0b1__py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +3 -0
- infrahub/auth.py +5 -5
- infrahub/cli/db.py +26 -2
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +25 -22
- infrahub/core/branch/models.py +2 -2
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +4 -3
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +2 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +9 -84
- infrahub/core/migrations/graph/__init__.py +6 -0
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
- infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
- infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
- infrahub/core/migrations/schema/node_attribute_add.py +5 -2
- infrahub/core/migrations/shared.py +5 -6
- infrahub/core/node/__init__.py +165 -42
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +67 -35
- infrahub/core/node/lock_utils.py +98 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +27 -15
- infrahub/core/query/node.py +61 -185
- infrahub/core/query/relationship.py +43 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +60 -20
- infrahub/core/schema/attribute_schema.py +0 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/internal.py +14 -1
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +137 -2
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +186 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +38 -1
- infrahub/git/integrator.py +22 -14
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +2 -2
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +212 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +11 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +34 -13
- infrahub/graphql/mutations/display_label.py +111 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +118 -0
- infrahub/graphql/mutations/ipam.py +21 -8
- infrahub/graphql/mutations/main.py +37 -153
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +2 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +185 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -30
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/middleware.py +26 -1
- infrahub/patch/plan_writer.py +2 -2
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +99 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +10 -1
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +16 -3
- infrahub/services/__init__.py +8 -5
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/tasks.py +3 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workflows/catalogue.py +110 -3
- infrahub/workflows/initialization.py +16 -0
- infrahub/workflows/models.py +17 -2
- infrahub_sdk/branch.py +5 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +364 -84
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +18 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +18 -3
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +38 -0
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/object.py +120 -7
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +9 -11
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +177 -134
- infrahub_testcontainers/container.py +1 -1
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub_sdk/convert_object_type.py
ADDED
@@ -0,0 +1,61 @@
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel, model_validator
+
+CONVERT_OBJECT_MUTATION = """
+mutation($node_id: String!, $target_kind: String!, $fields_mapping: GenericScalar!) {
+    ConvertObjectType(data: {
+        node_id: $node_id,
+        target_kind: $target_kind,
+        fields_mapping: $fields_mapping
+    }) {
+        ok
+        node
+    }
+}
+"""
+
+
+class ConversionFieldValue(BaseModel):  # Only one of these fields can be not None
+    """
+    Holds the new value of the destination field during an object conversion.
+    Use `attribute_value` to specify the new raw value of an attribute.
+    Use `peer_id` to specify new peer of a cardinality one relationship.
+    Use `peers_ids` to specify new peers of a cardinality many relationship.
+    Only one of `attribute_value`, `peer_id` and `peers_ids` can be specified.
+    """
+
+    attribute_value: Any | None = None
+    peer_id: str | None = None
+    peers_ids: list[str] | None = None
+
+    @model_validator(mode="after")
+    def check_only_one_field(self) -> ConversionFieldValue:
+        fields = [self.attribute_value, self.peer_id, self.peers_ids]
+        set_fields = [f for f in fields if f is not None]
+        if len(set_fields) != 1:
+            raise ValueError("Exactly one of `attribute_value`, `peer_id`, or `peers_ids` must be set")
+        return self
+
+
+class ConversionFieldInput(BaseModel):
+    """
+    Indicates how to fill in the value of the destination field during an object conversion.
+    Use `source_field` to reuse the value of the corresponding field of the object being converted.
+    Use `data` to specify the new value for the field.
+    Use `use_default_value` to set the destination field to its schema default.
+    Only one of `source_field`, `data`, or `use_default_value` can be specified.
+    """
+
+    source_field: str | None = None
+    data: ConversionFieldValue | None = None
+    use_default_value: bool = False
+
+    @model_validator(mode="after")
+    def check_only_one_field(self) -> ConversionFieldInput:
+        fields_set = [self.source_field is not None, self.data is not None, self.use_default_value is True]
+        if sum(fields_set) != 1:
+            raise ValueError("Exactly one of `source_field`, `data` or `use_default_value` must be set")
+        return self
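The two models above constrain how `fields_mapping` is built before it is sent through `CONVERT_OBJECT_MUTATION`. A minimal sketch of how a caller might assemble that payload, assuming the SDK client exposes `execute_graphql(query=..., variables=...)`; the address, node id, target kind, and field names are placeholders, not values from this diff:

```python
from infrahub_sdk import InfrahubClientSync
from infrahub_sdk.convert_object_type import (
    CONVERT_OBJECT_MUTATION,
    ConversionFieldInput,
    ConversionFieldValue,
)

# One entry per field of the target kind: reuse a source field, provide explicit
# data, or fall back to the schema default (exactly one option per field).
fields_mapping = {
    "name": ConversionFieldInput(source_field="name"),
    "description": ConversionFieldInput(data=ConversionFieldValue(attribute_value="converted")),
    "status": ConversionFieldInput(use_default_value=True),
}

client = InfrahubClientSync(address="http://localhost:8000")  # placeholder address
result = client.execute_graphql(
    query=CONVERT_OBJECT_MUTATION,
    variables={
        "node_id": "c6d1f3a2-0000-0000-0000-000000000000",  # placeholder id
        "target_kind": "InfraDevice",  # placeholder kind
        "fields_mapping": {field: spec.model_dump() for field, spec in fields_mapping.items()},
    },
)
# Assumes the client returns the GraphQL "data" payload as a dict.
print(result["ConvertObjectType"]["ok"])
```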
infrahub_sdk/ctl/check.py
CHANGED
@@ -11,10 +11,9 @@ import typer
 from rich.console import Console
 from rich.logging import RichHandler
 
-from ..ctl import config
 from ..ctl.client import initialize_client
 from ..ctl.exceptions import QueryNotFoundError
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.utils import catch_exception, execute_graphql_query
 from ..exceptions import ModuleImportError
 
@@ -59,7 +58,7 @@ def run(
     FORMAT = "%(message)s"
     logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
 
-    repository_config = get_repository_config(
+    repository_config = get_repository_config(find_repository_config_file())
 
     if list_available:
         list_checks(repository_config=repository_config)
infrahub_sdk/ctl/cli_commands.py
CHANGED
@@ -20,17 +20,17 @@ from rich.table import Table
 
 from .. import __version__ as sdk_version
 from ..async_typer import AsyncTyper
-from ..ctl import config
 from ..ctl.branch import app as branch_app
 from ..ctl.check import run as run_check
 from ..ctl.client import initialize_client, initialize_client_sync
 from ..ctl.exceptions import QueryNotFoundError
 from ..ctl.generator import run as run_generator
+from ..ctl.graphql import app as graphql_app
 from ..ctl.menu import app as menu_app
 from ..ctl.object import app as object_app
 from ..ctl.render import list_jinja2_transforms, print_template_errors
 from ..ctl.repository import app as repository_app
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.schema import app as schema_app
 from ..ctl.transform import list_transforms
 from ..ctl.utils import (
@@ -46,7 +46,7 @@ from ..protocols_generator.generator import CodeGenerator
 from ..schema import MainSchemaTypesAll, SchemaRoot
 from ..template import Jinja2Template
 from ..template.exceptions import JinjaTemplateError
-from ..utils import
+from ..utils import write_to_file
 from ..yaml import SchemaFile
 from .exporter import dump
 from .importer import load
@@ -63,6 +63,7 @@ app.add_typer(validate_app, name="validate")
 app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
 app.add_typer(object_app, name="object")
+app.add_typer(graphql_app, name="graphql")
 
 app.command(name="dump")(dump)
 app.command(name="load")(load)
@@ -208,7 +209,6 @@ async def _run_transform(
         debug: Prints debug info to the command line
         repository_config: Repository config object. This is used to load the graphql query from the repository.
     """
-    branch = get_branch(branch)
 
     try:
         response = execute_graphql_query(
@@ -260,7 +260,7 @@ async def render(
     """Render a local Jinja2 Transform for debugging purpose."""
 
     variables_dict = parse_cli_vars(variables)
-    repository_config = get_repository_config(
+    repository_config = get_repository_config(find_repository_config_file())
 
     if list_available or not transform_name:
         list_jinja2_transforms(config=repository_config)
@@ -270,7 +270,7 @@ async def render(
     try:
         transform_config = repository_config.get_jinja2_transform(name=transform_name)
     except KeyError as exc:
-        console.print(f'[red]Unable to find "{transform_name}" in
+        console.print(f'[red]Unable to find "{transform_name}" in repository config file')
         list_jinja2_transforms(config=repository_config)
         raise typer.Exit(1) from exc
 
@@ -310,7 +310,7 @@ def transform(
     """Render a local transform (TransformPython) for debugging purpose."""
 
     variables_dict = parse_cli_vars(variables)
-    repository_config = get_repository_config(
+    repository_config = get_repository_config(find_repository_config_file())
 
     if list_available or not transform_name:
         list_transforms(config=repository_config)
@@ -409,7 +409,6 @@ def info(  # noqa: PLR0915
     _: str = CONFIG_PARAM,
 ) -> None:
     """Display the status of the Python SDK."""
-
     info: dict[str, Any] = {
         "error": None,
         "status": ":x:",
@@ -417,12 +416,17 @@ def info(  # noqa: PLR0915
         "user_info": {},
         "groups": {},
     }
+    client = initialize_client_sync()
+    fetch_user_details = bool(client.config.username) or bool(client.config.api_token)
+
     try:
-        client = initialize_client_sync()
         info["infrahub_version"] = client.get_version()
-
+
+        if fetch_user_details:
+            info["user_info"] = client.get_user()
+            info["groups"] = client.get_user_permissions()
+
         info["status"] = ":white_heavy_check_mark:"
-        info["groups"] = client.get_user_permissions()
     except Exception as e:
         info["error"] = f"{e!s} ({e.__class__.__name__})"
 
@@ -469,7 +473,7 @@ def info(  # noqa: PLR0915
     pretty_model = Pretty(client.config.model_dump(), expand_all=True)
     layout["client_info"].update(Panel(pretty_model, title="Client Info"))
 
-    # Infrahub information
+    # Infrahub information panel
     infrahub_info = Table(show_header=False, box=None)
     if info["user_info"]:
         infrahub_info.add_row("User:", info["user_info"]["AccountProfile"]["display_label"])
@@ -487,6 +491,8 @@ def info(  # noqa: PLR0915
         infrahub_info.add_row("Groups:", "")
         for group, roles in groups.items():
             infrahub_info.add_row("", group, ", ".join(roles))
+    else:
+        infrahub_info.add_row("User:", "anonymous")
 
     layout["infrahub_info"].update(Panel(infrahub_info, title="Infrahub Info"))
 
infrahub_sdk/ctl/config.py
CHANGED
@@ -2,16 +2,22 @@
 
 from __future__ import annotations
 
+import sys
 from pathlib import Path
 
-import toml
 import typer
 from pydantic import Field, ValidationError, field_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
 DEFAULT_CONFIG_FILE = "infrahubctl.toml"
 ENVVAR_CONFIG_FILE = "INFRAHUBCTL_CONFIG"
 INFRAHUB_REPO_CONFIG_FILE = ".infrahub.yml"
+INFRAHUB_REPO_CONFIG_FILE_ALT = ".infrahub.yaml"
 
 
 class Settings(BaseSettings):
@@ -59,7 +65,7 @@ class ConfiguredSettings:
 
         if config_file.is_file():
            config_string = config_file.read_text(encoding="utf-8")
-            config_tmp =
+            config_tmp = tomllib.loads(config_string)
 
             self._settings = Settings(**config_tmp)
             return
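The config loader now parses `infrahubctl.toml` with the standard-library `tomllib` on Python 3.11+ and the `tomli` backport on older interpreters. A standalone sketch of the same pattern; the `settings.toml` filename is a placeholder, not something referenced by this diff:

```python
import sys
from pathlib import Path

if sys.version_info >= (3, 11):
    import tomllib  # stdlib, read-only TOML parser
else:
    import tomli as tomllib  # drop-in backport exposing the same loads()/load() API

config_path = Path("settings.toml")  # placeholder config file
if config_path.is_file():
    # tomllib.loads() takes a str; tomllib.load() would require a binary file handle.
    config = tomllib.loads(config_path.read_text(encoding="utf-8"))
    print(config.get("server", {}))
```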
infrahub_sdk/ctl/generator.py
CHANGED
@@ -6,9 +6,8 @@ from typing import TYPE_CHECKING, Optional
 import typer
 from rich.console import Console
 
-from ..ctl import config
 from ..ctl.client import initialize_client
-from ..ctl.repository import get_repository_config
+from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.utils import execute_graphql_query, init_logging, parse_cli_vars
 from ..exceptions import ModuleImportError
 from ..node import InfrahubNode
@@ -26,7 +25,7 @@ async def run(
     variables: Optional[list[str]] = None,
 ) -> None:
     init_logging(debug=debug)
-    repository_config = get_repository_config(
+    repository_config = get_repository_config(find_repository_config_file())
 
     if list_available or not generator_name:
         list_generators(repository_config=repository_config)
@@ -65,6 +64,8 @@ async def run(
             branch=branch or "",
             params=variables_dict,
             convert_query_response=generator_config.convert_query_response,
+            execute_in_proposed_change=generator_config.execute_in_proposed_change,
+            execute_after_merge=generator_config.execute_after_merge,
             infrahub_node=InfrahubNode,
         )
         await generator._init_client.schema.all(branch=generator.branch_name)
@@ -94,6 +95,8 @@ async def run(
             branch=branch or "",
             params=params,
             convert_query_response=generator_config.convert_query_response,
+            execute_in_proposed_change=generator_config.execute_in_proposed_change,
+            execute_after_merge=generator_config.execute_after_merge,
             infrahub_node=InfrahubNode,
         )
         data = execute_graphql_query(
infrahub_sdk/ctl/graphql.py
ADDED
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+import ast
+from collections import defaultdict
+from pathlib import Path
+from typing import Optional
+
+import typer
+from ariadne_codegen.client_generators.package import PackageGenerator, get_package_generator
+from ariadne_codegen.exceptions import ParsingError
+from ariadne_codegen.plugins.explorer import get_plugins_types
+from ariadne_codegen.plugins.manager import PluginManager
+from ariadne_codegen.schema import (
+    filter_fragments_definitions,
+    filter_operations_definitions,
+    get_graphql_schema_from_path,
+)
+from ariadne_codegen.settings import ClientSettings, CommentsStrategy
+from ariadne_codegen.utils import ast_to_str
+from graphql import DefinitionNode, GraphQLSchema, NoUnusedFragmentsRule, parse, specified_rules, validate
+from rich.console import Console
+
+from ..async_typer import AsyncTyper
+from ..ctl.client import initialize_client
+from ..ctl.utils import catch_exception
+from ..graphql.utils import insert_fragments_inline, remove_fragment_import
+from .parameters import CONFIG_PARAM
+
+app = AsyncTyper()
+console = Console()
+
+ARIADNE_PLUGINS = [
+    "infrahub_sdk.graphql.plugin.PydanticBaseModelPlugin",
+    "infrahub_sdk.graphql.plugin.FutureAnnotationPlugin",
+    "infrahub_sdk.graphql.plugin.StandardTypeHintPlugin",
+]
+
+
+def find_gql_files(query_path: Path) -> list[Path]:
+    """
+    Find all files with .gql extension in the specified directory.
+
+    Args:
+        query_path: Path to the directory to search for .gql files
+
+    Returns:
+        List of Path objects for all .gql files found
+    """
+    if not query_path.exists():
+        raise FileNotFoundError(f"File or directory not found: {query_path}")
+
+    if not query_path.is_dir() and query_path.is_file():
+        return [query_path]
+
+    return list(query_path.glob("**/*.gql"))
+
+
+def get_graphql_query(queries_path: Path, schema: GraphQLSchema) -> tuple[DefinitionNode, ...]:
+    """Get GraphQL queries definitions from a single GraphQL file."""
+
+    if not queries_path.exists():
+        raise FileNotFoundError(f"File not found: {queries_path}")
+    if not queries_path.is_file():
+        raise ValueError(f"{queries_path} is not a file")
+
+    queries_str = queries_path.read_text(encoding="utf-8")
+    queries_ast = parse(queries_str)
+    validation_errors = validate(
+        schema=schema,
+        document_ast=queries_ast,
+        rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule],
+    )
+    if validation_errors:
+        raise ValueError("\n\n".join(error.message for error in validation_errors))
+    return queries_ast.definitions
+
+
+def generate_result_types(directory: Path, package: PackageGenerator, fragment: ast.Module) -> None:
+    for file_name, module in package._result_types_files.items():
+        file_path = directory / file_name
+
+        insert_fragments_inline(module, fragment)
+        remove_fragment_import(module)
+
+        code = package._add_comments_to_code(ast_to_str(module), package.queries_source)
+        if package.plugin_manager:
+            code = package.plugin_manager.generate_result_types_code(code)
+        file_path.write_text(code)
+        package._generated_files.append(file_path.name)
+
+
+@app.callback()
+def callback() -> None:
+    """
+    Various GraphQL related commands.
+    """
+
+
+@app.command()
+@catch_exception(console=console)
+async def export_schema(
+    destination: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Export the GraphQL schema to a file."""
+
+    client = initialize_client()
+    schema_text = await client.schema.get_graphql_schema()
+
+    destination.parent.mkdir(parents=True, exist_ok=True)
+    destination.write_text(schema_text)
+    console.print(f"[green]Schema exported to {destination}")
+
+
+@app.command()
+@catch_exception(console=console)
+async def generate_return_types(
+    query: Optional[Path] = typer.Argument(
+        None, help="Location of the GraphQL query file(s). Defaults to current directory if not specified."
+    ),
+    schema: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Create Pydantic Models for GraphQL query return types"""
+
+    query = Path.cwd() if query is None else query
+
+    # Load the GraphQL schema
+    if not schema.exists():
+        raise FileNotFoundError(f"GraphQL Schema file not found: {schema}")
+    graphql_schema = get_graphql_schema_from_path(schema_path=str(schema))
+
+    # Initialize the plugin manager
+    plugin_manager = PluginManager(
+        schema=graphql_schema,
+        plugins_types=get_plugins_types(plugins_strs=ARIADNE_PLUGINS),
+    )
+
+    # Find the GraphQL files and organize them by directory
+    gql_files = find_gql_files(query)
+    gql_per_directory: dict[Path, list[Path]] = defaultdict(list)
+    for gql_file in gql_files:
+        gql_per_directory[gql_file.parent].append(gql_file)
+
+    # Generate the Pydantic Models for the GraphQL queries
+    for directory, gql_files in gql_per_directory.items():
+        for gql_file in gql_files:
+            try:
+                definitions = get_graphql_query(queries_path=gql_file, schema=graphql_schema)
+            except ValueError as exc:
+                console.print(f"[red]Error generating result types for {gql_file}: {exc}")
+                continue
+            queries = filter_operations_definitions(definitions)
+            fragments = filter_fragments_definitions(definitions)
+
+            package_generator = get_package_generator(
+                schema=graphql_schema,
+                fragments=fragments,
+                settings=ClientSettings(
+                    schema_path=str(schema),
+                    target_package_name=directory.name,
+                    queries_path=str(directory),
+                    include_comments=CommentsStrategy.NONE,
+                ),
+                plugin_manager=plugin_manager,
+            )
+
+            parsing_failed = False
+            try:
+                for query_operation in queries:
+                    package_generator.add_operation(query_operation)
+            except ParsingError as exc:
+                console.print(f"[red]Unable to process {gql_file.name}: {exc}")
+                parsing_failed = True
+
+            if parsing_failed:
+                continue
+
+            module_fragment = package_generator.fragments_generator.generate()
+
+            generate_result_types(directory=directory, package=package_generator, fragment=module_fragment)
+
+            for file_name in package_generator._result_types_files.keys():
+                console.print(f"[green]Generated {file_name} in {directory}")
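The core of `generate_return_types` is validating each `.gql` file against the exported schema before handing it to ariadne-codegen. A reduced sketch of that validation step using graphql-core directly; the schema and query strings below are placeholders, not taken from Infrahub:

```python
from graphql import NoUnusedFragmentsRule, build_schema, parse, specified_rules, validate

# Placeholder schema standing in for the exported schema.graphql file.
schema = build_schema("""
type Query {
  device(name: String!): String
}
""")

# Placeholder query standing in for a .gql file found by find_gql_files().
document = parse("""
query GetDevice($name: String!) {
  device(name: $name)
}
""")

# Like the command above, NoUnusedFragmentsRule is skipped so fragment-only files still validate.
errors = validate(schema, document, rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule])
if errors:
    raise ValueError("\n\n".join(error.message for error in errors))
print("query is valid against the schema")
```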
infrahub_sdk/ctl/repository.py
CHANGED
@@ -24,11 +24,49 @@ app = AsyncTyper()
 console = Console()
 
 
+def find_repository_config_file(base_path: Path | None = None) -> Path:
+    """Find the repository config file, checking for both .yml and .yaml extensions.
+
+    Args:
+        base_path: Base directory to search in. If None, uses current directory.
+
+    Returns:
+        Path to the config file.
+
+    Raises:
+        FileNotFoundError: If neither .infrahub.yml nor .infrahub.yaml exists.
+    """
+    if base_path is None:
+        base_path = Path()
+
+    yml_path = base_path / ".infrahub.yml"
+    yaml_path = base_path / ".infrahub.yaml"
+
+    # Prefer .yml if both exist
+    if yml_path.exists():
+        return yml_path
+    if yaml_path.exists():
+        return yaml_path
+    # For backward compatibility, return .yml path for error messages
+    return yml_path
+
+
 def get_repository_config(repo_config_file: Path) -> InfrahubRepositoryConfig:
+    # If the file doesn't exist, try to find it with alternate extension
+    if not repo_config_file.exists():
+        if repo_config_file.name == ".infrahub.yml":
+            alt_path = repo_config_file.parent / ".infrahub.yaml"
+            if alt_path.exists():
+                repo_config_file = alt_path
+        elif repo_config_file.name == ".infrahub.yaml":
+            alt_path = repo_config_file.parent / ".infrahub.yml"
+            if alt_path.exists():
+                repo_config_file = alt_path
+
     try:
         config_file_data = load_repository_config_file(repo_config_file)
     except FileNotFoundError as exc:
-        console.print(f"[red]File not found {exc}")
+        console.print(f"[red]File not found {exc} (also checked for .infrahub.yml and .infrahub.yaml)")
         raise typer.Exit(1) from exc
     except FileNotValidError as exc:
         console.print(f"[red]{exc.message}")
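A short usage sketch for the new helper: callers resolve the config path first, then load it, so repositories using either `.infrahub.yml` or `.infrahub.yaml` keep working. The repository path below is a placeholder:

```python
from pathlib import Path

from infrahub_sdk.ctl.repository import find_repository_config_file, get_repository_config

repo_root = Path(".")  # placeholder: any checked-out repository
config_path = find_repository_config_file(base_path=repo_root)  # prefers .infrahub.yml when both exist
repository_config = get_repository_config(config_path)  # prints an error and exits if neither file is found
print(f"loaded repository config from {config_path.name}")
```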
infrahub_sdk/ctl/schema.py
CHANGED
@@ -73,14 +73,29 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
             loc_type = loc_path[-1]
             input_str = error.get("input", None)
             error_message = f"{loc_type} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
         elif len(loc_path) > 6:
             loc_type = loc_path[5]
-
+            error_data = node[loc_type]
+            attribute = loc_path[6]
+
+            if isinstance(attribute, str):
+                input_label = None
+                for data in error_data:
+                    if data.get(attribute) is not None:
+                        input_label = data.get("name", None)
+                        break
+            else:
+                input_label = error_data[attribute].get("name", None)
+
             input_str = error.get("input", None)
             error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
 
 def handle_non_detail_errors(response: dict[str, Any]) -> None:
infrahub_sdk/ctl/utils.py
CHANGED
@@ -118,6 +118,10 @@ def execute_graphql_query(
     query_str = query_object.load_query()
 
     client = initialize_client_sync()
+
+    if not branch:
+        branch = client.config.default_infrahub_branch
+
     response = client.execute_graphql(
         query=query_str,
         branch_name=branch,
infrahub_sdk/ctl/validate.py
CHANGED
@@ -14,7 +14,7 @@ from ..ctl.client import initialize_client, initialize_client_sync
 from ..ctl.exceptions import QueryNotFoundError
 from ..ctl.utils import catch_exception, find_graphql_query, parse_cli_vars
 from ..exceptions import GraphQLError
-from ..utils import
+from ..utils import write_to_file
 from ..yaml import SchemaFile
 from .parameters import CONFIG_PARAM
 from .utils import load_yamlfile_from_disk_and_exit
@@ -68,8 +68,6 @@ def validate_graphql(
 ) -> None:
     """Validate the format of a GraphQL Query stored locally by executing it on a remote GraphQL endpoint"""
 
-    branch = get_branch(branch)
-
     try:
         query_str = find_graphql_query(query)
     except QueryNotFoundError:
@@ -81,6 +79,10 @@ def validate_graphql(
     variables_dict = parse_cli_vars(variables)
 
     client = initialize_client_sync()
+
+    if not branch:
+        branch = client.config.default_infrahub_branch
+
     try:
         response = client.execute_graphql(
             query=query_str,
infrahub_sdk/diff.py
CHANGED
@@ -37,8 +37,8 @@ class NodeDiffPeer(TypedDict):
 
 def get_diff_summary_query() -> str:
     return """
-    query GetDiffTree($branch_name: String
-        DiffTree(branch: $branch_name) {
+    query GetDiffTree($branch_name: String!, $name: String, $from_time: DateTime, $to_time: DateTime) {
+        DiffTree(branch: $branch_name, name: $name, from_time: $from_time, to_time: $to_time) {
             nodes {
                 uuid
                 kind
@@ -117,12 +117,11 @@ def diff_tree_node_to_node_diff(node_dict: dict[str, Any], branch_name: str) ->
         )
         relationship_diff["peers"] = peer_diffs
         element_diffs.append(relationship_diff)
-
+    return NodeDiff(
         branch=branch_name,
         kind=str(node_dict.get("kind")),
         id=str(node_dict.get("uuid")),
-        action=str(node_dict.get("
+        action=str(node_dict.get("status")),
         display_label=str(node_dict.get("label")),
         elements=element_diffs,
     )
-    return node_diff
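With the extra variables, the diff summary query can now be scoped to a named diff and a time window. A hedged sketch of calling it through the SDK client, assuming `execute_graphql(query=..., variables=...)` returns the GraphQL data payload; the address and branch name are placeholders:

```python
from infrahub_sdk import InfrahubClientSync
from infrahub_sdk.diff import get_diff_summary_query

client = InfrahubClientSync(address="http://localhost:8000")  # placeholder address
response = client.execute_graphql(
    query=get_diff_summary_query(),
    variables={
        "branch_name": "my-feature-branch",  # required
        "name": None,       # optional: restrict to a named diff
        "from_time": None,  # optional: DateTime lower bound
        "to_time": None,    # optional: DateTime upper bound
    },
)
for node in response["DiffTree"]["nodes"]:
    # diff.py maps the node "status" field onto the NodeDiff "action" key
    print(node["uuid"], node["kind"], node["status"])
```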
infrahub_sdk/exceptions.py
CHANGED
@@ -17,6 +17,8 @@ class JsonDecodeError(Error):
         self.url = url
         if not self.message and self.url:
             self.message = f"Unable to decode response as JSON data from {self.url}"
+        if self.content:
+            self.message += f". Server response: {self.content}"
         super().__init__(self.message)
 
 
infrahub_sdk/generator.py
CHANGED
@@ -26,6 +26,8 @@ class InfrahubGenerator(InfrahubOperation):
         generator_instance: str = "",
         params: dict | None = None,
         convert_query_response: bool = False,
+        execute_in_proposed_change: bool = True,
+        execute_after_merge: bool = True,
         logger: logging.Logger | None = None,
         request_context: RequestContext | None = None,
     ) -> None:
@@ -44,6 +46,8 @@ class InfrahubGenerator(InfrahubOperation):
         self._client: InfrahubClient | None = None
         self.logger = logger if logger else logging.getLogger("infrahub.tasks")
         self.request_context = request_context
+        self.execute_in_proposed_change = execute_in_proposed_change
+        self.execute_after_merge = execute_after_merge
 
     @property
     def subscribers(self) -> list[str] | None:
@@ -81,8 +85,10 @@ class InfrahubGenerator(InfrahubOperation):
         unpacked = data.get("data") or data
         await self.process_nodes(data=unpacked)
 
+        group_type = "CoreGeneratorGroup" if self.execute_after_merge else "CoreGeneratorAwareGroup"
+
         async with self._init_client.start_tracking(
-            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type=
+            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type=group_type
         ) as self.client:
             await self.generate(data=unpacked)
 
infrahub_sdk/graphql/__init__.py
ADDED
@@ -0,0 +1,12 @@
+from .constants import VARIABLE_TYPE_MAPPING
+from .query import Mutation, Query
+from .renderers import render_input_block, render_query_block, render_variables_to_string
+
+__all__ = [
+    "VARIABLE_TYPE_MAPPING",
+    "Mutation",
+    "Query",
+    "render_input_block",
+    "render_query_block",
+    "render_variables_to_string",
+]
infrahub_sdk/graphql/constants.py
ADDED
@@ -0,0 +1 @@
+VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!"))
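`VARIABLE_TYPE_MAPPING` pairs Python types with the non-null GraphQL variable types used by the renderers. A small illustrative helper, not part of the SDK, showing how such a mapping can be applied to infer variable declarations from Python values:

```python
from infrahub_sdk.graphql import VARIABLE_TYPE_MAPPING


def graphql_type_for(value: object) -> str:
    """Return the GraphQL type string registered for the exact Python type of `value`."""
    for python_type, graphql_type in VARIABLE_TYPE_MAPPING:
        # Exact type check (rather than isinstance) keeps bool values from matching the int entry.
        if type(value) is python_type:
            return graphql_type
    raise ValueError(f"No GraphQL type registered for {type(value).__name__}")


variables = {"name": "atl1-edge1", "asn": 65001, "enabled": True}  # placeholder values
print({key: graphql_type_for(val) for key, val in variables.items()})
# -> {'name': 'String!', 'asn': 'Int!', 'enabled': 'Boolean!'}
```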