infrahub-server 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +27 -3
- infrahub/auth.py +5 -5
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +160 -157
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +56 -9
- infrahub/computed_attribute/tasks.py +19 -7
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +35 -24
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +9 -5
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +72 -10
- infrahub/core/changelog/models.py +2 -10
- infrahub/core/constants/__init__.py +4 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/calculator.py +2 -2
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/delete_query.py +9 -5
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/diff/query/merge.py +39 -23
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +7 -4
- infrahub/core/manager.py +3 -81
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +13 -10
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +26 -5
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +66 -19
- infrahub/core/models.py +2 -2
- infrahub/core/node/__init__.py +207 -54
- infrahub/core/node/create.py +53 -49
- infrahub/core/node/lock_utils.py +124 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +82 -15
- infrahub/core/query/diff.py +61 -16
- infrahub/core/query/ipam.py +16 -4
- infrahub/core/query/node.py +92 -212
- infrahub/core/query/relationship.py +44 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/relationship/model.py +69 -24
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/check.py +1 -1
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/core/transform.py +1 -1
- infrahub/core/schema/definitions/internal.py +12 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/node_schema.py +1 -0
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +295 -10
- infrahub/core/schema/schema_branch_display.py +135 -0
- infrahub/core/schema/schema_branch_hfid.py +120 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +42 -2
- infrahub/git/integrator.py +22 -14
- infrahub/git/tasks.py +52 -2
- infrahub/graphql/analyzer.py +9 -0
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +16 -6
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +213 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +16 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +44 -13
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +73 -41
- infrahub/graphql/mutations/main.py +61 -178
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +8 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +119 -42
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/message_bus/types.py +1 -0
- infrahub/patch/plan_writer.py +2 -2
- infrahub/permissions/constants.py +2 -0
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +98 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +67 -14
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +9 -1
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workers/dependencies.py +3 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +118 -3
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/models.py +17 -2
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/branch.py +17 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +376 -95
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/branch.py +3 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +20 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +40 -10
- infrahub_sdk/ctl/task.py +110 -0
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/node/relationship.py +1 -3
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +54 -6
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/task/models.py +6 -4
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +233 -176
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub/cli/dev.py
ADDED
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+import importlib
+import logging
+from pathlib import Path  # noqa: TC003
+from typing import TYPE_CHECKING
+
+import typer
+from graphql import parse, print_ast, print_schema
+from infrahub_sdk.async_typer import AsyncTyper
+from rich.logging import RichHandler
+
+from infrahub import config
+from infrahub.core.initialization import (
+    first_time_initialization,
+    initialization,
+)
+from infrahub.core.schema import SchemaRoot, core_models, internal_schema
+from infrahub.core.schema.schema_branch import SchemaBranch
+from infrahub.core.utils import delete_all_nodes
+from infrahub.graphql.manager import GraphQLSchemaManager
+from infrahub.graphql.schema_sort import sort_schema_ast
+from infrahub.log import get_logger
+
+if TYPE_CHECKING:
+    from infrahub.cli.context import CliContext
+
+app = AsyncTyper()
+
+
+@app.command(name="export-graphql-schema")
+async def export_graphql_schema(
+    ctx: typer.Context,  # noqa: ARG001
+    config_file: str = typer.Option("infrahub.toml", envvar="INFRAHUB_CONFIG"),
+    out: Path = typer.Option("schema.graphql"),  # noqa: B008
+) -> None:
+    """Export the Core GraphQL schema to a file."""
+
+    config.load_and_exit(config_file_name=config_file)
+
+    schema = SchemaRoot(**internal_schema)
+    full_schema = schema.merge(schema=SchemaRoot(**core_models))
+
+    schema_branch = SchemaBranch(cache={}, name="default")
+    schema_branch.load_schema(schema=full_schema)
+
+    schema_branch.process()
+
+    gqlm = GraphQLSchemaManager(schema=schema_branch)
+    gql_schema = gqlm.generate()
+
+    schema_str = print_schema(gql_schema)
+    schema_ast = parse(schema_str)
+    sorted_schema_ast = sort_schema_ast(schema_ast)
+    sorted_schema_str = print_ast(sorted_schema_ast)
+
+    out.write_text(sorted_schema_str)
+
+
+@app.command(name="db-init")
+async def database_init(
+    ctx: typer.Context,
+    config_file: str = typer.Option(
+        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
+    ),
+) -> None:
+    """Erase the content of the database and initialize it with the core schema."""
+
+    log = get_logger()
+
+    # --------------------------------------------------
+    # CLEANUP
+    # - For now we delete everything in the database
+    #   TODO, if possible try to implement this in an idempotent way
+    # --------------------------------------------------
+
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    config.load_and_exit(config_file_name=config_file)
+
+    context: CliContext = ctx.obj
+    dbdriver = await context.init_db(retry=1)
+    async with dbdriver.start_transaction() as db:
+        log.info("Delete All Nodes")
+        await delete_all_nodes(db=db)
+        await first_time_initialization(db=db)
+
+    await dbdriver.close()
+
+
+@app.command(name="load-test-data")
+async def load_test_data(
+    ctx: typer.Context,
+    config_file: str = typer.Option(
+        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
+    ),
+    dataset: str = "dataset01",
+) -> None:
+    """Load test data into the database from the `test_data` directory."""
+
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    config.load_and_exit(config_file_name=config_file)
+
+    context: CliContext = ctx.obj
+    dbdriver = await context.init_db(retry=1)
+
+    async with dbdriver.start_session() as db:
+        await initialization(db=db)
+
+        log_level = "DEBUG"
+
+        FORMAT = "%(message)s"
+        logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
+        logging.getLogger("infrahub")
+
+        dataset_module = importlib.import_module(f"infrahub.test_data.{dataset}")
+        await dataset_module.load_data(db=db)
+
+    await dbdriver.close()
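
The export-graphql-schema command above builds the core schema in memory, sorts the generated SDL with sort_schema_ast, and writes it to a file. A minimal sketch of driving it through Typer's test runner; invoking it this way with CliRunner, and having a readable infrahub.toml next to the script, are assumptions, only the command and option names come from the diff:

from typer.testing import CliRunner

from infrahub.cli.dev import app

runner = CliRunner()
# Render the core GraphQL SDL and write the sorted output to core.graphql.
result = runner.invoke(app, ["export-graphql-schema", "--out", "core.graphql"])
print(result.exit_code, result.output)
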
infrahub/cli/tasks.py
CHANGED
@@ -3,9 +3,11 @@ import logging
 import typer
 from infrahub_sdk.async_typer import AsyncTyper
 from prefect.client.orchestration import get_client
+from prefect.client.schemas.objects import StateType

 from infrahub import config
 from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
+from infrahub.task_manager.task import PrefectTask
 from infrahub.tasks.dummy import DUMMY_FLOW, DummyInput
 from infrahub.workflows.initialization import setup_task_manager
 from infrahub.workflows.models import WorkerPoolDefinition
@@ -50,3 +52,47 @@ async def execute(
         workflow=DUMMY_FLOW, parameters={"data": DummyInput(firstname="John", lastname="Doe")}
     )  # type: ignore[var-annotated]
     print(result)
+
+
+flush_app = AsyncTyper()
+
+app.add_typer(flush_app, name="flush")
+
+
+@flush_app.command()
+async def flow_runs(
+    ctx: typer.Context,  # noqa: ARG001
+    config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
+    days_to_keep: int = 30,
+    batch_size: int = 100,
+) -> None:
+    """Flush old task runs"""
+    logging.getLogger("infrahub").setLevel(logging.WARNING)
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    logging.getLogger("prefect").setLevel(logging.ERROR)
+
+    config.load_and_exit(config_file_name=config_file)
+
+    await PrefectTask.delete_flow_runs(
+        days_to_keep=days_to_keep,
+        batch_size=batch_size,
+    )
+
+
+@flush_app.command()
+async def stale_runs(
+    ctx: typer.Context,  # noqa: ARG001
+    config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
+    days_to_keep: int = 2,
+    batch_size: int = 100,
+) -> None:
+    """Flush stale task runs"""
+    logging.getLogger("infrahub").setLevel(logging.WARNING)
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    logging.getLogger("prefect").setLevel(logging.ERROR)
+
+    config.load_and_exit(config_file_name=config_file)
+
+    await PrefectTask.delete_flow_runs(
+        states=[StateType.RUNNING], delete=False, days_to_keep=days_to_keep, batch_size=batch_size
+    )
infrahub/cli/upgrade.py
CHANGED
@@ -8,13 +8,19 @@ import typer
 from deepdiff import DeepDiff
 from infrahub_sdk.async_typer import AsyncTyper
 from prefect.client.orchestration import get_client
-from rich import print as rprint

 from infrahub import config
-from infrahub.core.initialization import
+from infrahub.core.initialization import (
+    create_anonymous_role,
+    create_default_account_groups,
+    get_root_node,
+    initialize_registry,
+)
 from infrahub.core.manager import NodeManager
+from infrahub.core.migrations.shared import get_migration_console
 from infrahub.core.protocols import CoreAccount, CoreObjectPermission
 from infrahub.dependencies.registry import build_component_registry
+from infrahub.lock import initialize_lock
 from infrahub.menu.menu import default_menu
 from infrahub.menu.models import MenuDict
 from infrahub.menu.repository import MenuRepository
@@ -26,13 +32,22 @@ from infrahub.workflows.initialization import (
     setup_worker_pools,
 )

-from .db import
+from .db import (
+    detect_migration_to_run,
+    initialize_internal_schema,
+    mark_branches_needing_rebase,
+    migrate_database,
+    trigger_rebase_branches,
+    update_core_schema,
+)

 if TYPE_CHECKING:
     from infrahub.cli.context import CliContext
+    from infrahub.core.branch.models import Branch
     from infrahub.database import InfrahubDatabase

 app = AsyncTyper()
+console = get_migration_console()


 @app.command(name="upgrade")
@@ -40,6 +55,10 @@ async def upgrade_cmd(
     ctx: typer.Context,
     config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
     check: bool = typer.Option(False, help="Check the state of the system without upgrading."),
+    rebase_branches: bool = typer.Option(False, help="Rebase and apply migrations to branches if required."),
+    interactive: bool = typer.Option(
+        False, help="Use interactive prompt to accept or deny rebase of individual branches."
+    ),
 ) -> None:
     """Upgrade Infrahub to the latest version."""

@@ -54,9 +73,12 @@ async def upgrade_cmd(
     dbdriver = await context.init_db(retry=1)

     await initialize_registry(db=dbdriver)
+    initialize_lock()

     build_component_registry()

+    root_node = await get_root_node(db=dbdriver)
+
     # NOTE add step to validate if the database and the task manager are reachable

     # -------------------------------------------
@@ -67,9 +89,14 @@ async def upgrade_cmd(
     # Upgrade Infrahub Database and Schema
     # -------------------------------------------

-
+    migrations = await detect_migration_to_run(current_graph_version=root_node.graph_version)
+    if check:
+        await dbdriver.close()
+        return
+
+    if not await migrate_database(db=dbdriver, initialize=False, migrations=migrations):
         # A migration failed, stop the upgrade process
-
+        console.log("Upgrade cancelled due to migration failure.")
         await dbdriver.close()
         return

@@ -91,6 +118,26 @@ async def upgrade_cmd(
     await setup_deployments(client=client)
     await trigger_configure_all()

+    # -------------------------------------------
+    # Perform branch rebase and apply migrations to them
+    # -------------------------------------------
+    branches = await mark_branches_needing_rebase(db=dbdriver)
+    plural = len(branches) != 1
+    get_migration_console().log(
+        f"Found {len(branches)} {'branches' if plural else 'branch'} that {'need' if plural else 'needs'} to be rebased"
+    )
+
+    if rebase_branches:
+        branches_to_rebase: list[Branch] = []
+        if not interactive:
+            branches_to_rebase = branches
+        else:
+            for branch in branches:
+                if typer.confirm(f"Rebase branch {branch.name}?"):
+                    branches_to_rebase.append(branch)
+
+        await trigger_rebase_branches(db=dbdriver, branches=branches_to_rebase)
+
     await dbdriver.close()


@@ -107,21 +154,21 @@ async def upgrade_menu(db: InfrahubDatabase) -> None:
     diff_menu = DeepDiff(menu_items.to_rest(), default_menu_dict.to_rest(), ignore_order=True)

     if not diff_menu:
-
+        console.log("Menu Up to date, nothing to update")
         return

     await menu_repository.update_menu(existing_menu=menu_items, new_menu=default_menu_dict, menu_nodes=menu_nodes)
-
+    console.log("Menu has been updated")


 async def upgrade_permissions(db: InfrahubDatabase) -> None:
     existing_permissions = await NodeManager.query(schema=CoreObjectPermission, db=db, limit=1)
     if existing_permissions:
-
+        console.log("Permissions Up to date, nothing to update")
         return

     await setup_permissions(db=db)
-
+    console.log("Permissions have been updated")


 async def setup_permissions(db: InfrahubDatabase) -> None:
@@ -2,6 +2,7 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
from typing import TYPE_CHECKING
|
|
4
4
|
|
|
5
|
+
from infrahub_sdk.exceptions import URLNotFoundError
|
|
5
6
|
from infrahub_sdk.protocols import CoreTransformPython
|
|
6
7
|
from infrahub_sdk.template import Jinja2Template
|
|
7
8
|
from prefect import flow
|
|
@@ -177,12 +178,17 @@ async def computed_attribute_jinja2_update_value(
|
|
|
177
178
|
log.debug(f"Ignoring to update {obj} with existing value on {attribute_name}={value}")
|
|
178
179
|
return
|
|
179
180
|
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
181
|
+
try:
|
|
182
|
+
await client.execute_graphql(
|
|
183
|
+
query=UPDATE_ATTRIBUTE,
|
|
184
|
+
variables={"id": obj.node_id, "kind": node_kind, "attribute": attribute_name, "value": value},
|
|
185
|
+
branch_name=branch_name,
|
|
186
|
+
)
|
|
187
|
+
log.info(f"Updating computed attribute {node_kind}.{attribute_name}='{value}' ({obj.node_id})")
|
|
188
|
+
except URLNotFoundError:
|
|
189
|
+
log.warning(
|
|
190
|
+
f"Update of computed attribute {node_kind}.{attribute_name} failed for branch {branch_name} (not found)"
|
|
191
|
+
)
|
|
186
192
|
|
|
187
193
|
|
|
188
194
|
@flow(
|
|
@@ -229,7 +235,13 @@ async def process_jinja2(
|
|
|
229
235
|
|
|
230
236
|
for id_filter in computed_macro.node_filters:
|
|
231
237
|
query = attribute_graphql.render_graphql_query(query_filter=id_filter, filter_id=object_id)
|
|
232
|
-
|
|
238
|
+
try:
|
|
239
|
+
response = await client.execute_graphql(query=query, branch_name=branch_name)
|
|
240
|
+
except URLNotFoundError:
|
|
241
|
+
log.warning(
|
|
242
|
+
f"Process computed attributes for {computed_attribute_kind}.{computed_attribute_name} failed for branch {branch_name} (not found)"
|
|
243
|
+
)
|
|
244
|
+
return
|
|
233
245
|
output = attribute_graphql.parse_response(response=response)
|
|
234
246
|
found.extend(output)
|
|
235
247
|
|
infrahub/config.py
CHANGED
@@ -8,7 +8,7 @@ from enum import Enum
 from pathlib import Path
 from typing import TYPE_CHECKING, Any

-import
+import tomllib
 from infrahub_sdk.utils import generate_uuid
 from pydantic import (
     AliasChoices,
@@ -371,6 +371,11 @@ class CacheSettings(BaseSettings):
     tls_enabled: bool = Field(default=False, description="Indicates if TLS is enabled for the connection")
     tls_insecure: bool = Field(default=False, description="Indicates if TLS certificates are verified")
     tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
+    clean_up_deadlocks_interval_mins: int = Field(
+        default=15,
+        ge=1,
+        description="Age threshold in minutes: locks older than this and owned by inactive workers are deleted by the cleanup task.",
+    )

     @property
     def service_port(self) -> int:
@@ -975,7 +980,7 @@ def load(config_file_name: Path | str = "infrahub.toml", config_data: dict[str,

     if config_file.exists():
         config_string = config_file.read_text(encoding="utf-8")
-        config_tmp =
+        config_tmp = tomllib.loads(config_string)

         return Settings(**config_tmp)

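
The new clean_up_deadlocks_interval_mins setting is a plain pydantic field with a lower bound, so it can be exercised without a running server. A minimal sketch, assuming CacheSettings can be instantiated directly and ignoring environment-variable loading:

from pydantic import ValidationError

from infrahub.config import CacheSettings

# Defaults to 15 minutes; the ge=1 constraint rejects anything below one minute.
print(CacheSettings().clean_up_deadlocks_interval_mins)
print(CacheSettings(clean_up_deadlocks_interval_mins=30).clean_up_deadlocks_interval_mins)

try:
    CacheSettings(clean_up_deadlocks_interval_mins=0)
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # greater_than_equal in pydantic v2
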
infrahub/core/attribute.py
CHANGED
@@ -18,6 +18,7 @@ from infrahub.core.changelog.models import AttributeChangelog
 from infrahub.core.constants import NULL_VALUE, AttributeDBNodeType, BranchSupportType, RelationshipStatus
 from infrahub.core.property import FlagPropertyMixin, NodePropertyData, NodePropertyMixin
 from infrahub.core.query.attribute import (
+    AttributeClearNodePropertyQuery,
     AttributeGetQuery,
     AttributeUpdateFlagQuery,
     AttributeUpdateNodePropertyQuery,
@@ -36,7 +37,7 @@ from .schema.attribute_parameters import NumberAttributeParameters
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
     from infrahub.core.node import Node
-    from infrahub.core.schema import AttributeSchema
+    from infrahub.core.schema import AttributeSchema, MainSchemaTypes
     from infrahub.database import InfrahubDatabase


@@ -324,7 +325,7 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin):

         save_at = Timestamp(at)

-        if not self.id
+        if not self.id:
             return None

         return await self._update(at=save_at, db=db)
@@ -395,7 +396,6 @@

         Get the current value
         - If the value is the same, do nothing
-        - If the value is inherited and is different, raise error (for now just ignore)
         - If the value is different, create new node and update relationship

         """
@@ -470,28 +470,38 @@

         # ---------- Update the Node Properties ----------
         for prop_name in self._node_properties:
-
-
-
-
-
-
-
-
+            current_prop_id = getattr(self, f"{prop_name}_id")
+            database_prop_id: str | None = None
+            if prop_name in current_attr_data.node_properties:
+                database_prop_id = current_attr_data.node_properties[prop_name].uuid
+            needs_update = current_prop_id is not None and current_prop_id != database_prop_id
+            needs_clear = self.is_clear(prop_name) and database_prop_id
+
+            if not needs_update and not needs_clear:
+                continue

-
-
-
-
-
+            changelog.add_property(
+                name=prop_name,
+                value_current=current_prop_id,
+                value_previous=database_prop_id,
+            )
+
+            if needs_update:
                 query = await AttributeUpdateNodePropertyQuery.init(
-                    db=db, attr=self, at=update_at, prop_name=prop_name, prop_id=
+                    db=db, attr=self, at=update_at, prop_name=prop_name, prop_id=current_prop_id
                 )
                 await query.execute(db=db)

-
-
-
+            if needs_clear:
+                query = await AttributeClearNodePropertyQuery.init(
+                    db=db, attr=self, at=update_at, prop_name=prop_name, prop_id=database_prop_id
+                )
+                await query.execute(db=db)
+
+            # set the to time on the previously active edge
+            rel = current_attr_result.get(f"rel_{prop_name}")
+            if rel and rel.get("branch") == branch.name:
+                await update_relationships_to([rel.element_id], to=update_at, db=db)

         if changelog.has_updates:
             return changelog
@@ -578,7 +588,7 @@

         return value

-    async def from_graphql(self, data: dict, db: InfrahubDatabase) -> bool:
+    async def from_graphql(self, data: dict, db: InfrahubDatabase, process_pools: bool = True) -> bool:
         """Update attr from GraphQL payload"""

         changed = False
@@ -592,7 +602,8 @@
             changed = True
         elif "from_pool" in data:
             self.from_pool = data["from_pool"]
-
+            if process_pools:
+                await self.node.handle_pool(db=db, attribute=self, errors=[])
             changed = True

         if changed and self.is_from_profile:
@@ -627,7 +638,7 @@
             return AttributeDBNodeType.DEFAULT
         return AttributeDBNodeType.INDEXED

-    def get_create_data(self) -> AttributeCreateData:
+    def get_create_data(self, node_schema: MainSchemaTypes) -> AttributeCreateData:
         branch = self.branch
         hierarchy_level = branch.hierarchy_level
         if self.schema.branch == BranchSupportType.AGNOSTIC:
@@ -642,7 +653,7 @@
             branch=branch.name,
             status="active",
             branch_level=hierarchy_level,
-            branch_support=self.schema.branch.value,
+            branch_support=self.schema.branch.value if self.schema.branch is not None else node_schema.branch,
             content=self.to_db(),
             is_default=self.is_default,
             is_protected=self.is_protected,
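
from_graphql on BaseAttribute now accepts a process_pools flag that controls whether a from_pool assignment immediately triggers the node's pool allocation via handle_pool. A minimal sketch of the call shape; apply_payload_without_pool and pool_reference are illustrative names, and the structure of the "from_pool" payload is whatever the GraphQL mutation already supplies:

from infrahub.core.attribute import BaseAttribute
from infrahub.database import InfrahubDatabase


async def apply_payload_without_pool(attr: BaseAttribute, db: InfrahubDatabase, pool_reference: str) -> bool:
    # Apply the payload but skip the immediate handle_pool call; the caller is
    # then responsible for resolving the pool assignment later in the transaction.
    return await attr.from_graphql(data={"from_pool": pool_reference}, db=db, process_pools=False)
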
infrahub/core/branch/enums.py
CHANGED
infrahub/core/branch/models.py
CHANGED
@@ -5,9 +5,9 @@ from typing import TYPE_CHECKING, Any, Optional, Self, Union

 from pydantic import Field, field_validator

-from infrahub.core.
-
-
+from infrahub.core.branch.enums import BranchStatus
+from infrahub.core.constants import GLOBAL_BRANCH_NAME
+from infrahub.core.graph import GRAPH_VERSION
 from infrahub.core.models import SchemaBranchHash  # noqa: TC001
 from infrahub.core.node.standard import StandardNode
 from infrahub.core.query import QueryType
@@ -21,8 +21,6 @@ from infrahub.core.registry import registry
 from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import BranchNotFoundError, InitializationError, ValidationError

-from .enums import BranchStatus
-
 if TYPE_CHECKING:
     from infrahub.database import InfrahubDatabase

@@ -47,6 +45,7 @@ class Branch(StandardNode):
     is_isolated: bool = True
     schema_changed_at: Optional[str] = None
     schema_hash: Optional[SchemaBranchHash] = None
+    graph_version: int | None = None

     _exclude_attrs: list[str] = ["id", "uuid", "owner"]

@@ -262,6 +261,10 @@

         return start, end

+    async def create(self, db: InfrahubDatabase) -> bool:
+        self.graph_version = GRAPH_VERSION
+        return await super().create(db=db)
+
     async def delete(self, db: InfrahubDatabase) -> None:
         if self.is_default:
             raise ValidationError(f"Unable to delete {self.name} it is the default branch.")
@@ -485,6 +488,7 @@
         # FIXME, we must ensure that there is no conflict before rebasing a branch
         # Otherwise we could endup with a complicated situation
         self.branched_from = at.to_string()
+        self.status = BranchStatus.OPEN
         await self.save(db=db)

         # Update the branch in the registry after the rebase
infrahub/core/branch/needs_rebase_status.py
ADDED
@@ -0,0 +1,11 @@
+from infrahub.core.branch import Branch
+from infrahub.core.branch.enums import BranchStatus
+
+
+def raise_needs_rebase_error(branch_name: str) -> None:
+    raise ValueError(f"Branch {branch_name} must be rebased before any updates can be made")
+
+
+def check_need_rebase_status(branch: Branch) -> None:
+    if branch.status == BranchStatus.NEED_REBASE:
+        raise_needs_rebase_error(branch_name=branch.name)