infrahub-server 1.5.0b1__py3-none-any.whl → 1.5.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/internal.py +2 -0
- infrahub/api/oauth2.py +13 -19
- infrahub/api/oidc.py +15 -21
- infrahub/api/schema.py +24 -3
- infrahub/artifacts/models.py +2 -1
- infrahub/auth.py +137 -3
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +83 -102
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +30 -3
- infrahub/computed_attribute/tasks.py +20 -8
- infrahub/core/attribute.py +10 -2
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +7 -3
- infrahub/core/branch/tasks.py +68 -7
- infrahub/core/constants/__init__.py +3 -0
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +5 -2
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +10 -13
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
- infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
- infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +30 -2
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +48 -14
- infrahub/core/node/__init__.py +16 -11
- infrahub/core/node/create.py +46 -63
- infrahub/core/node/lock_utils.py +70 -44
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/query/attribute.py +55 -0
- infrahub/core/query/ipam.py +1 -0
- infrahub/core/query/node.py +9 -3
- infrahub/core/query/relationship.py +1 -0
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -0
- infrahub/core/schema/definitions/internal.py +2 -2
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/manager.py +22 -1
- infrahub/core/schema/schema_branch.py +180 -22
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/tasks.py +13 -7
- infrahub/events/branch_action.py +27 -1
- infrahub/generators/tasks.py +3 -7
- infrahub/git/base.py +4 -1
- infrahub/git/integrator.py +1 -1
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +22 -5
- infrahub/git/tasks.py +66 -10
- infrahub/git/utils.py +123 -1
- infrahub/graphql/api/endpoints.py +14 -4
- infrahub/graphql/manager.py +4 -9
- infrahub/graphql/mutations/convert_object_type.py +11 -1
- infrahub/graphql/mutations/display_label.py +17 -10
- infrahub/graphql/mutations/hfid.py +17 -10
- infrahub/graphql/mutations/ipam.py +54 -35
- infrahub/graphql/mutations/main.py +27 -28
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/hfid/tasks.py +13 -7
- infrahub/lock.py +52 -12
- infrahub/message_bus/types.py +2 -1
- infrahub/permissions/constants.py +2 -0
- infrahub/proposed_change/tasks.py +25 -16
- infrahub/server.py +6 -2
- infrahub/services/__init__.py +2 -2
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +3 -0
- infrahub/workers/dependencies.py +10 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +8 -0
- infrahub/workflows/initialization.py +5 -0
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/client.py +13 -10
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/ctl/schema.py +22 -7
- infrahub_sdk/schema/__init__.py +32 -4
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +37 -102
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +3 -1
- {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +115 -101
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
- infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +0 -97
- infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +0 -86
- {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/core/branch/tasks.py
CHANGED

@@ -12,6 +12,7 @@ from infrahub import lock
 from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
 from infrahub.core import registry
 from infrahub.core.branch import Branch
+from infrahub.core.branch.enums import BranchStatus
 from infrahub.core.changelog.diff import DiffChangelogCollector, MigrationTracker
 from infrahub.core.constants import MutationAction
 from infrahub.core.diff.coordinator import DiffCoordinator
@@ -21,7 +22,10 @@ from infrahub.core.diff.merger.merger import DiffMerger
 from infrahub.core.diff.model.path import BranchTrackingId, EnrichedDiffRoot, EnrichedDiffRootMetadata
 from infrahub.core.diff.models import RequestDiffUpdate
 from infrahub.core.diff.repository.repository import DiffRepository
+from infrahub.core.graph import GRAPH_VERSION
 from infrahub.core.merge import BranchMerger
+from infrahub.core.migrations.exceptions import MigrationFailureError
+from infrahub.core.migrations.runner import MigrationRunner
 from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
 from infrahub.core.migrations.schema.tasks import schema_apply_migrations
 from infrahub.core.timestamp import Timestamp
@@ -29,7 +33,13 @@ from infrahub.core.validators.determiner import ConstraintValidatorDeterminer
 from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
 from infrahub.core.validators.tasks import schema_validate_migrations
 from infrahub.dependencies.registry import get_component_registry
-from infrahub.events.branch_action import
+from infrahub.events.branch_action import (
+    BranchCreatedEvent,
+    BranchDeletedEvent,
+    BranchMergedEvent,
+    BranchMigratedEvent,
+    BranchRebasedEvent,
+)
 from infrahub.events.models import EventMeta, InfrahubEvent
 from infrahub.events.node_action import get_node_event
 from infrahub.exceptions import BranchNotFoundError, ValidationError
@@ -49,8 +59,57 @@ from infrahub.workflows.catalogue import (
 from infrahub.workflows.utils import add_tags


+@flow(name="branch-migrate", flow_run_name="Apply migrations to branch {branch}")
+async def migrate_branch(branch: str, context: InfrahubContext, send_events: bool = True) -> None:
+    await add_tags(branches=[branch])
+
+    database = await get_database()
+    async with database.start_session() as db:
+        log = get_run_logger()
+
+        obj = await Branch.get_by_name(db=db, name=branch)
+
+        if obj.graph_version == GRAPH_VERSION:
+            log.info(f"Branch '{obj.name}' has graph version {obj.graph_version}, no migrations to apply")
+            return
+
+        migration_runner = MigrationRunner(branch=obj)
+        if not migration_runner.has_migrations():
+            log.info(f"No migrations detected for branch '{obj.name}'")
+            obj.graph_version = GRAPH_VERSION
+            await obj.save(db=db)
+            return
+
+        # Branch status will remain as so if the migration process fails
+        # This will help user to know that a branch is in an invalid state to be used properly and that actions need to be taken
+        if obj.status != BranchStatus.NEED_UPGRADE_REBASE:
+            obj.status = BranchStatus.NEED_UPGRADE_REBASE
+            await obj.save(db=db)
+
+        try:
+            log.info(f"Running migrations for branch '{obj.name}'")
+            await migration_runner.run(db=db)
+        except MigrationFailureError as exc:
+            log.error(f"Failed to run migrations for branch '{obj.name}': {exc.errors}")
+            raise
+
+        if obj.status == BranchStatus.NEED_UPGRADE_REBASE:
+            obj.status = BranchStatus.OPEN
+        obj.graph_version = GRAPH_VERSION
+        await obj.save(db=db)
+
+        if send_events:
+            event_service = await get_event_service()
+            await event_service.send(
+                BranchMigratedEvent(
+                    branch_name=obj.name, branch_id=str(obj.uuid), meta=EventMeta(branch=obj, context=context)
+                )
+            )
+
+
 @flow(name="branch-rebase", flow_run_name="Rebase branch {branch}")
-async def rebase_branch(branch: str, context: InfrahubContext) -> None:  # noqa: PLR0915
+async def rebase_branch(branch: str, context: InfrahubContext, send_events: bool = True) -> None:  # noqa: PLR0915
+    workflow = get_workflow()
     database = await get_database()
     async with database.start_session() as db:
         log = get_run_logger()
@@ -69,7 +128,7 @@ async def rebase_branch(branch: str, context: InfrahubContext) -> None:  # noqa:
             diff_repository=diff_repository,
             source_branch=obj,
             diff_locker=DiffLocker(),
-            workflow=
+            workflow=workflow,
         )

         enriched_diff_metadata = await diff_coordinator.update_branch_diff(base_branch=base_branch, diff_branch=obj)
@@ -156,15 +215,17 @@ async def rebase_branch(branch: str, context: InfrahubContext) -> None:  # noqa:
                 target_branch_name=registry.default_branch,
             )
             if ipam_node_details:
-                await
+                await workflow.submit_workflow(
                     workflow=IPAM_RECONCILIATION,
                     context=context,
                     parameters={"branch": obj.name, "ipam_node_details": ipam_node_details},
                 )

-        await
-
-
+        await migrate_branch(branch=branch, context=context, send_events=send_events)
+        await workflow.submit_workflow(workflow=DIFF_REFRESH_ALL, context=context, parameters={"branch_name": obj.name})
+
+        if not send_events:
+            return

         # -------------------------------------------------------------
         # Generate an event to indicate that a branch has been rebased
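The `migrate_branch` flow added above is also what `rebase_branch` now calls, with `send_events` propagated from the caller. As a rough sketch of how the flow could be driven across all branches during an upgrade: the driver function and the `Branch.get_list` call are assumptions not shown in this diff; only the `migrate_branch(branch=..., context=..., send_events=...)` signature comes from the change above.

```python
# Hypothetical driver, not part of this release: only the migrate_branch()
# signature is taken from the diff above; Branch.get_list() and the loop
# itself are assumptions.
from infrahub.context import InfrahubContext
from infrahub.core.branch import Branch
from infrahub.core.branch.tasks import migrate_branch
from infrahub.database import InfrahubDatabase


async def migrate_all_branches(db: InfrahubDatabase, context: InfrahubContext) -> None:
    for branch in await Branch.get_list(db=db):
        # Each call is a no-op when the branch already reports GRAPH_VERSION;
        # otherwise it runs the MigrationRunner and flips the branch status
        # back to OPEN on success.
        await migrate_branch(branch=branch.name, context=context, send_events=True)
```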
infrahub/core/constants/__init__.py
CHANGED

@@ -37,6 +37,7 @@ RESERVED_ATTR_REL_NAMES = [
     "rels",
     "save",
     "hfid",
+    "process_pools",
 ]

 RESERVED_ATTR_GEN_NAMES = ["type"]
@@ -50,6 +51,7 @@ class EventType(InfrahubStringEnum):
     BRANCH_CREATED = f"{EVENT_NAMESPACE}.branch.created"
     BRANCH_DELETED = f"{EVENT_NAMESPACE}.branch.deleted"
     BRANCH_MERGED = f"{EVENT_NAMESPACE}.branch.merged"
+    BRANCH_MIGRATED = f"{EVENT_NAMESPACE}.branch.migrated"
     BRANCH_REBASED = f"{EVENT_NAMESPACE}.branch.rebased"

     SCHEMA_UPDATED = f"{EVENT_NAMESPACE}.schema.updated"
@@ -99,6 +101,7 @@ class GlobalPermissions(InfrahubStringEnum):
     MANAGE_PERMISSIONS = "manage_permissions"
     MANAGE_REPOSITORIES = "manage_repositories"
     OVERRIDE_CONTEXT = "override_context"
+    UPDATE_OBJECT_HFID_DISPLAY_LABEL = "update_object_hfid_display_label"


 class PermissionAction(InfrahubStringEnum):
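Both additions here are plain string-valued enum members (assuming, as the surrounding hunks suggest, that they live in `infrahub/core/constants/__init__.py`). A minimal sketch of what they resolve to; the exact `EVENT_NAMESPACE` value is not shown in this diff, so the event name below is written relative to it:

```python
from infrahub.core.constants import EventType, GlobalPermissions

# New event type emitted by the branch-migrate flow in infrahub/core/branch/tasks.py.
print(EventType.BRANCH_MIGRATED.value)  # "<EVENT_NAMESPACE>.branch.migrated"
# New global permission string gating HFID / display label updates.
print(GlobalPermissions.UPDATE_OBJECT_HFID_DISPLAY_LABEL.value)  # "update_object_hfid_display_label"
```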
infrahub/core/graph/__init__.py
CHANGED

@@ -1 +1 @@
-GRAPH_VERSION =
+GRAPH_VERSION = 43
infrahub/core/initialization.py
CHANGED

@@ -50,7 +50,7 @@ async def get_root_node(db: InfrahubDatabase, initialize: bool = False) -> Root:
     roots = await Root.get_list(db=db)
     if len(roots) == 0 and not initialize:
         raise DatabaseError(
-            "The Database hasn't been initialized for Infrahub, please
+            "The Database hasn't been initialized for Infrahub, please 'infrahub server start' to initialize the database."
         )

     if len(roots) == 0:
@@ -137,7 +137,8 @@ async def add_indexes(db: InfrahubDatabase) -> None:
     await index_manager.add()


-async def initialization(db: InfrahubDatabase, add_database_indexes: bool = False) ->
+async def initialization(db: InfrahubDatabase, add_database_indexes: bool = False) -> bool:
+    """Run initialization and setup, returns a boolean to indicate if it's the initial setup."""
     if config.SETTINGS.database.db_type == config.DatabaseType.MEMGRAPH:
         session = await db.session()
         await session.run(query="SET DATABASE SETTING 'log.level' TO 'INFO'")
@@ -148,6 +149,7 @@ async def initialization(db: InfrahubDatabase, add_database_indexes: bool = Fals
     # Initialize the database and Load the Root node
     # ---------------------------------------------------
     async with lock.registry.initialization():
+        first_time_initialization = len(await Root.get_list(db=db)) == 0
         log.debug("Checking Root Node")
         await initialize_registry(db=db, initialize=True)

@@ -210,6 +212,7 @@ async def initialization(db: InfrahubDatabase, add_database_indexes: bool = Fals
         ip_namespace = await get_default_ipnamespace(db=db)
         if ip_namespace:
             registry.default_ipnamespace = ip_namespace.id
+        return first_time_initialization


 async def create_root_node(db: InfrahubDatabase) -> Root:
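`initialization()` now reports whether this call performed the very first setup of the database (no pre-existing `Root` node). A minimal sketch of how a caller could use the flag; the startup hook itself is hypothetical, only the changed return type comes from this diff:

```python
# Hypothetical caller: only initialization() returning bool is taken from the
# diff above; what gets done on a first-time install is illustrative.
from infrahub.core.initialization import initialization
from infrahub.database import InfrahubDatabase


async def startup(db: InfrahubDatabase) -> None:
    first_time = await initialization(db=db, add_database_indexes=True)
    if first_time:
        # e.g. load default schemas or seed data only on a fresh database
        ...
```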
infrahub/core/migrations/__init__.py
CHANGED

@@ -1,5 +1,6 @@
 from .schema.attribute_kind_update import AttributeKindUpdateMigration
 from .schema.attribute_name_update import AttributeNameUpdateMigration
+from .schema.attribute_supports_profile import AttributeSupportsProfileUpdateMigration
 from .schema.node_attribute_add import NodeAttributeAddMigration
 from .schema.node_attribute_remove import NodeAttributeRemoveMigration
 from .schema.node_kind_update import NodeKindUpdateMigration
@@ -19,6 +20,8 @@ MIGRATION_MAP: dict[str, type[SchemaMigration] | None] = {
     "attribute.name.update": AttributeNameUpdateMigration,
     "attribute.branch.update": None,
     "attribute.kind.update": AttributeKindUpdateMigration,
+    "attribute.optional.update": AttributeSupportsProfileUpdateMigration,
+    "attribute.read_only.update": AttributeSupportsProfileUpdateMigration,
     "relationship.branch.update": None,
     "relationship.direction.update": None,
     "relationship.identifier.update": PlaceholderDummyMigration,
infrahub/core/migrations/graph/__init__.py
CHANGED

@@ -41,16 +41,16 @@ from .m036_drop_attr_value_index import Migration036
 from .m037_index_attr_vals import Migration037
 from .m038_redo_0000_prefix_fix import Migration038
 from .m039_ipam_reconcile import Migration039
-from .
-from .
-from .
+from .m040_duplicated_attributes import Migration040
+from .m041_profile_attrs_in_db import Migration041
+from .m042_create_hfid_display_label_in_db import Migration042
+from .m043_backfill_hfid_display_label_in_db import Migration043

 if TYPE_CHECKING:
-    from
+    from ..shared import MigrationTypes

-    from ..shared import ArbitraryMigration, GraphMigration, InternalSchemaMigration

-MIGRATIONS: list[type[
+MIGRATIONS: list[type[MigrationTypes]] = [
     Migration001,
     Migration002,
     Migration003,
@@ -93,25 +93,22 @@ MIGRATIONS: list[type[GraphMigration | InternalSchemaMigration | ArbitraryMigrat
     Migration040,
     Migration041,
     Migration042,
+    Migration043,
 ]


-async def get_graph_migrations(
-    root: Root,
-) -> Sequence[GraphMigration | InternalSchemaMigration | ArbitraryMigration]:
+async def get_graph_migrations(current_graph_version: int) -> Sequence[MigrationTypes]:
     applicable_migrations = []
     for migration_class in MIGRATIONS:
         migration = migration_class.init()
-        if
+        if current_graph_version > migration.minimum_version:
             continue
         applicable_migrations.append(migration)

     return applicable_migrations


-def get_migration_by_number(
-    migration_number: int | str,
-) -> GraphMigration | InternalSchemaMigration | ArbitraryMigration:
+def get_migration_by_number(migration_number: int | str) -> MigrationTypes:
     # Convert to string and pad with zeros if needed
     try:
         num = int(migration_number)
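`get_graph_migrations` now takes the stored graph version directly (instead of the `Root` node) and keeps a migration only when the graph has not already moved past its `minimum_version`. A small standalone illustration of that selection rule; the version values are made up for the example:

```python
# Illustration of the new filter in get_graph_migrations(): a migration is
# skipped when current_graph_version > migration.minimum_version.
current_graph_version = 39  # example value for a database that is behind

for minimum_version in (37, 38, 39, 40, 43):
    decision = "skip" if current_graph_version > minimum_version else "apply"
    print(f"minimum_version={minimum_version}: {decision}")
# minimum_version=37: skip
# minimum_version=38: skip
# minimum_version=39: apply
# minimum_version=40: apply
# minimum_version=43: apply
```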
infrahub/core/migrations/graph/load_schema_branch.py
ADDED

@@ -0,0 +1,21 @@
+from infrahub.core import registry
+from infrahub.core.branch import Branch
+from infrahub.core.schema import SchemaRoot, internal_schema
+from infrahub.core.schema.manager import SchemaManager
+from infrahub.core.schema.schema_branch import SchemaBranch
+from infrahub.database import InfrahubDatabase
+from infrahub.exceptions import InitializationError
+
+
+async def get_or_load_schema_branch(db: InfrahubDatabase, branch: Branch) -> SchemaBranch:
+    try:
+        if registry.schema.has_schema_branch(branch.name):
+            return registry.schema.get_schema_branch(branch.name)
+    except InitializationError:
+        schema_manager = SchemaManager()
+        registry.schema = schema_manager
+        internal_schema_root = SchemaRoot(**internal_schema)
+        registry.schema.register_schema(schema=internal_schema_root)
+    schema_branch = await registry.schema.load_schema_from_db(db=db, branch=branch)
+    registry.schema.set_schema_branch(name=branch.name, schema=schema_branch)
+    return schema_branch
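`Migration041` further below calls this helper from `_do_execute_for_branch` so a schema branch is guaranteed to be loaded before it starts reading and saving nodes. A minimal usage sketch mirroring that call; the surrounding function is illustrative, and the import path follows the file location shown in the summary list:

```python
# Sketch of the call pattern used by the new branch-scoped migrations:
# make sure the schema for the target branch is in the registry (loading it
# from the database if needed) before running NodeManager operations.
from infrahub.core.branch import Branch
from infrahub.core.migrations.graph.load_schema_branch import get_or_load_schema_branch
from infrahub.database import InfrahubDatabase


async def prepare_branch(db: InfrahubDatabase, branch_name: str) -> None:
    branch = await Branch.get_by_name(db=db, name=branch_name)
    schema_branch = await get_or_load_schema_branch(db=db, branch=branch)
    # schema_branch is now registered under branch.name in registry.schema
    _ = schema_branch
```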
infrahub/core/migrations/graph/m013_convert_git_password_credential.py
CHANGED

@@ -286,7 +286,7 @@ class Migration013AddInternalStatusData(AttributeAddQuery):
         kwargs.pop("branch", None)

         super().__init__(
-
+            node_kinds=["CoreGenericRepository"],
             attribute_name="internal_status",
             attribute_kind="Dropdown",
             branch_support=BranchSupportType.LOCAL.value,
infrahub/core/migrations/graph/m040_duplicated_attributes.py
ADDED

@@ -0,0 +1,81 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from infrahub.core.migrations.shared import MigrationResult
+from infrahub.core.query import Query, QueryType
+
+from ..shared import GraphMigration
+
+if TYPE_CHECKING:
+    from infrahub.database import InfrahubDatabase
+
+
+class DeleteDuplicatedAttributesQuery(Query):
+    name: str = "delete_duplicated_attributes"
+    type: QueryType = QueryType.WRITE
+    insert_return: bool = False
+    insert_limit: bool = False
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        query = """
+        // -------------
+        // get all the Nodes linked to multiple Attributes with the same name to drastically reduce the search space
+        // -------------
+        MATCH (n:Node)-[:HAS_ATTRIBUTE]->(attr:Attribute)
+        WITH DISTINCT n, attr
+        WITH n, attr.name AS attr_name, count(*) AS num_attrs
+        WHERE num_attrs > 1
+        // -------------
+        // for each Node-attr_name pair, get the possible duplicate Attributes
+        // -------------
+        MATCH (n)-[:HAS_ATTRIBUTE]->(dup_attr:Attribute {name: attr_name})
+        WITH DISTINCT n, dup_attr
+        // -------------
+        // get the branch(es) for each possible duplicate Attribute
+        // -------------
+        CALL (n, dup_attr) {
+            MATCH (n)-[r:HAS_ATTRIBUTE {status: "active"}]->(dup_attr)
+            WHERE r.to IS NULL
+            AND NOT exists((n)-[:HAS_ATTRIBUTE {status: "deleted", branch: r.branch}]->(dup_attr))
+            RETURN r.branch AS branch
+        }
+        // -------------
+        // get the latest update time for each duplicate Attribute on each branch
+        // -------------
+        CALL (dup_attr, branch) {
+            MATCH (dup_attr)-[r {branch: branch}]-()
+            RETURN max(r.from) AS latest_update
+        }
+        // -------------
+        // order the duplicate Attributes by latest update time
+        // -------------
+        WITH n, dup_attr, branch, latest_update
+        ORDER BY n, branch, dup_attr.name, latest_update DESC
+        // -------------
+        // for any Node-dup_attr_name pairs with multiple duplicate Attributes, keep the Attribute with the latest update
+        // on this branch and delete all the other edges on this branch for this Attribute
+        // -------------
+        WITH n, branch, dup_attr.name AS dup_attr_name, collect(dup_attr) AS dup_attrs_reverse_chronological
+        WHERE size(dup_attrs_reverse_chronological) > 1
+        WITH branch, tail(dup_attrs_reverse_chronological) AS dup_attrs_to_delete
+        UNWIND dup_attrs_to_delete AS dup_attr_to_delete
+        MATCH (dup_attr_to_delete)-[r {branch: branch}]-()
+        DELETE r
+        // -------------
+        // delete any orphaned Attributes
+        // -------------
+        WITH DISTINCT dup_attr_to_delete
+        WHERE NOT exists((dup_attr_to_delete)--())
+        DELETE dup_attr_to_delete
+        """
+        self.add_to_query(query)
+
+
+class Migration040(GraphMigration):
+    name: str = "040_duplicated_attributes"
+    queries: Sequence[type[Query]] = [DeleteDuplicatedAttributesQuery]
+    minimum_version: int = 39
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
+        return MigrationResult()
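`Migration040` shows the declarative `GraphMigration` shape used here: a `Query` subclass carries the Cypher, and the migration lists its queries, a `minimum_version`, and a `validate_migration` hook. A skeletal template based only on the attributes visible in this file; the class and query names are placeholders, and the base class may expose hooks not shown in this diff:

```python
# Skeleton of the GraphMigration pattern used by Migration040 above.
# Names are placeholders; anything beyond the attributes visible in this
# diff is an assumption.
from typing import Any, Sequence

from infrahub.core.migrations.shared import MigrationResult
from infrahub.core.query import Query, QueryType

from ..shared import GraphMigration  # same import style as m040 itself


class MyCleanupQuery(Query):
    name: str = "my_cleanup"
    type: QueryType = QueryType.WRITE

    async def query_init(self, db, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
        # Placeholder Cypher; a real migration would put its cleanup logic here.
        self.add_to_query("MATCH (n:ToBeCleanedUp) DETACH DELETE n")


class Migration0XX(GraphMigration):
    name: str = "0xx_my_cleanup"
    queries: Sequence[type[Query]] = [MyCleanupQuery]
    minimum_version: int = 43  # skipped once the graph is already past this version

    async def validate_migration(self, db) -> MigrationResult:  # noqa: ARG002
        return MigrationResult()
```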
infrahub/core/migrations/graph/m041_profile_attrs_in_db.py
ADDED

@@ -0,0 +1,145 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from rich.console import Console
+from rich.progress import Progress
+
+from infrahub.core.branch.models import Branch
+from infrahub.core.initialization import get_root_node
+from infrahub.core.manager import NodeManager
+from infrahub.core.migrations.shared import MigrationResult
+from infrahub.core.query import Query, QueryType
+from infrahub.core.timestamp import Timestamp
+from infrahub.log import get_logger
+from infrahub.profiles.node_applier import NodeProfilesApplier
+
+from ..shared import MigrationRequiringRebase
+from .load_schema_branch import get_or_load_schema_branch
+
+if TYPE_CHECKING:
+    from infrahub.database import InfrahubDatabase
+
+log = get_logger()
+
+
+class GetUpdatedProfilesForBranchQuery(Query):
+    """
+    Get CoreProfile UUIDs with updated attributes on this branch
+    """
+
+    name = "get_profiles_by_branch"
+    type = QueryType.READ
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        self.params["branch"] = self.branch.name
+        query = """
+        MATCH (profile:CoreProfile)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[e:HAS_VALUE]->(:AttributeValue)
+        WHERE e.branch = $branch
+        WITH DISTINCT profile.uuid AS profile_uuid
+        """
+        self.add_to_query(query)
+        self.return_labels = ["profile_uuid"]
+
+    def get_profile_ids(self) -> list[str]:
+        """Get list of updated profile UUIDs"""
+        return [result.get_as_type("profile_uuid", str) for result in self.get_results()]
+
+
+class GetNodesWithProfileUpdatesForBranchQuery(Query):
+    """
+    Get Node UUIDs by which branches they have updated profiles on
+    """
+
+    name = "get_nodes_with_profile_updates_by_branch"
+    type = QueryType.READ
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        self.params["branch"] = self.branch.name
+        query = """
+        MATCH (node:Node)-[e:IS_RELATED]->(:Relationship {name: "node__profile"})
+        WHERE NOT node:CoreProfile
+        AND e.branch = $branch
+        WITH DISTINCT node.uuid AS node_uuid
+        """
+        self.add_to_query(query)
+        self.return_labels = ["node_uuid"]
+
+    def get_node_ids(self) -> list[str]:
+        """Get list of updated node UUIDs"""
+        return [result.get_as_type("node_uuid", str) for result in self.get_results()]
+
+
+class Migration041(MigrationRequiringRebase):
+    """
+    Save profile attribute values on each node using the profile in the database
+    For any profile that has updates on a given branch (including default branch)
+    - run NodeProfilesApplier.apply_profiles on each node related to the profile on that branch
+    For any node that has an updated relationship to a profile on a given branch
+    - run NodeProfilesApplier.apply_profiles on the node on that branch
+    """
+
+    name: str = "041_profile_attrs_in_db"
+    minimum_version: int = 40
+
+    def _get_profile_applier(self, db: InfrahubDatabase, branch: Branch) -> NodeProfilesApplier:
+        return NodeProfilesApplier(db=db, branch=branch)
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
+        return MigrationResult()
+
+    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+        root_node = await get_root_node(db=db, initialize=False)
+        default_branch_name = root_node.default_branch
+        default_branch = await Branch.get_by_name(db=db, name=default_branch_name)
+        return await self._do_execute_for_branch(db=db, branch=default_branch)
+
+    async def execute_against_branch(self, db: InfrahubDatabase, branch: Branch) -> MigrationResult:
+        return await self._do_execute_for_branch(db=db, branch=branch)
+
+    async def _do_execute_for_branch(self, db: InfrahubDatabase, branch: Branch) -> MigrationResult:
+        console = Console()
+        result = MigrationResult()
+        await get_or_load_schema_branch(db=db, branch=branch)
+
+        console.print(f"Gathering profiles for branch {branch.name}...", end="")
+        get_updated_profiles_for_branch_query = await GetUpdatedProfilesForBranchQuery.init(db=db, branch=branch)
+        await get_updated_profiles_for_branch_query.execute(db=db)
+        profile_ids = get_updated_profiles_for_branch_query.get_profile_ids()
+
+        profiles_map = await NodeManager.get_many(db=db, branch=branch, ids=list(profile_ids))
+        console.print("done")
+
+        node_ids_to_update: set[str] = set()
+        with Progress() as progress:
+            gather_nodes_task = progress.add_task(
+                f"Gathering affected objects for each profile on branch {branch.name}...", total=len(profiles_map)
+            )
+
+            for profile in profiles_map.values():
+                node_relationship_manager = profile.get_relationship("related_nodes")
+                node_peers = await node_relationship_manager.get_db_peers(db=db)
+                node_ids_to_update.update(str(peer.peer_id) for peer in node_peers)
+                progress.update(gather_nodes_task, advance=1)
+
+        console.print("Identifying nodes with profile updates by branch...", end="")
+        get_nodes_with_profile_updates_by_branch_query = await GetNodesWithProfileUpdatesForBranchQuery.init(
+            db=db, branch=branch
+        )
+        await get_nodes_with_profile_updates_by_branch_query.execute(db=db)
+        node_ids_to_update.update(get_nodes_with_profile_updates_by_branch_query.get_node_ids())
+        console.print("done")
+
+        right_now = Timestamp()
+        with Progress() as progress:
+            apply_task = progress.add_task("Applying profiles to nodes...", total=len(node_ids_to_update))
+            applier = self._get_profile_applier(db=db, branch=branch)
+            for node_id in node_ids_to_update:
+                node = await NodeManager.get_one(db=db, branch=branch, id=node_id, at=right_now)
+                if node:
+                    updated_field_names = await applier.apply_profiles(node=node)
+                    if updated_field_names:
+                        await node.save(db=db, fields=updated_field_names, at=right_now)
+                progress.update(apply_task, advance=1)
+
+        return result