infrahub-server 1.2.6__py3-none-any.whl → 1.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/transformation.py +1 -0
- infrahub/artifacts/models.py +4 -0
- infrahub/cli/db.py +3 -1
- infrahub/cli/patch.py +153 -0
- infrahub/computed_attribute/models.py +81 -1
- infrahub/computed_attribute/tasks.py +35 -53
- infrahub/config.py +2 -1
- infrahub/constants/__init__.py +0 -0
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/graph/index.py +3 -1
- infrahub/core/manager.py +16 -5
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +7 -8
- infrahub/core/node/__init__.py +4 -1
- infrahub/core/protocols.py +1 -0
- infrahub/core/query/ipam.py +7 -5
- infrahub/core/query/node.py +96 -29
- infrahub/core/schema/definitions/core/builtin.py +2 -4
- infrahub/core/schema/definitions/core/transform.py +1 -0
- infrahub/core/validators/aggregated_checker.py +2 -2
- infrahub/core/validators/uniqueness/query.py +8 -3
- infrahub/database/__init__.py +2 -10
- infrahub/database/index.py +1 -1
- infrahub/database/memgraph.py +2 -1
- infrahub/database/neo4j.py +1 -1
- infrahub/git/integrator.py +27 -3
- infrahub/git/models.py +4 -0
- infrahub/git/tasks.py +3 -0
- infrahub/git_credential/helper.py +2 -2
- infrahub/message_bus/operations/requests/proposed_change.py +6 -0
- infrahub/message_bus/types.py +3 -0
- infrahub/patch/__init__.py +0 -0
- infrahub/patch/constants.py +13 -0
- infrahub/patch/edge_adder.py +64 -0
- infrahub/patch/edge_deleter.py +33 -0
- infrahub/patch/edge_updater.py +28 -0
- infrahub/patch/models.py +98 -0
- infrahub/patch/plan_reader.py +107 -0
- infrahub/patch/plan_writer.py +92 -0
- infrahub/patch/queries/__init__.py +0 -0
- infrahub/patch/queries/base.py +17 -0
- infrahub/patch/queries/consolidate_duplicated_nodes.py +109 -0
- infrahub/patch/queries/delete_duplicated_edges.py +138 -0
- infrahub/patch/runner.py +254 -0
- infrahub/patch/vertex_adder.py +61 -0
- infrahub/patch/vertex_deleter.py +33 -0
- infrahub/patch/vertex_updater.py +28 -0
- infrahub/proposed_change/tasks.py +1 -0
- infrahub/server.py +3 -1
- infrahub/transformations/models.py +3 -0
- infrahub/transformations/tasks.py +1 -0
- infrahub/webhook/models.py +3 -0
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +4 -4
- infrahub_sdk/config.py +17 -0
- infrahub_sdk/ctl/cli_commands.py +9 -3
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/ctl/menu.py +56 -13
- infrahub_sdk/ctl/object.py +55 -5
- infrahub_sdk/ctl/utils.py +22 -1
- infrahub_sdk/exceptions.py +19 -1
- infrahub_sdk/generator.py +12 -66
- infrahub_sdk/node.py +42 -26
- infrahub_sdk/operation.py +80 -0
- infrahub_sdk/protocols.py +12 -0
- infrahub_sdk/protocols_generator/__init__.py +0 -0
- infrahub_sdk/protocols_generator/constants.py +28 -0
- infrahub_sdk/{code_generator.py → protocols_generator/generator.py} +47 -34
- infrahub_sdk/protocols_generator/template.j2 +114 -0
- infrahub_sdk/recorder.py +3 -0
- infrahub_sdk/schema/__init__.py +110 -74
- infrahub_sdk/schema/main.py +36 -2
- infrahub_sdk/schema/repository.py +6 -0
- infrahub_sdk/spec/menu.py +3 -3
- infrahub_sdk/spec/object.py +522 -41
- infrahub_sdk/testing/docker.py +4 -5
- infrahub_sdk/testing/schemas/animal.py +7 -0
- infrahub_sdk/transforms.py +15 -27
- infrahub_sdk/yaml.py +63 -7
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/RECORD +85 -64
- infrahub_testcontainers/docker-compose.test.yml +2 -0
- infrahub_sdk/ctl/constants.py +0 -115
- /infrahub/{database/constants.py → constants/database.py} +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/entry_points.txt +0 -0
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from dataclasses import asdict
|
|
2
|
+
|
|
3
|
+
from infrahub.core.query import QueryType
|
|
4
|
+
from infrahub.database import InfrahubDatabase
|
|
5
|
+
|
|
6
|
+
from .models import EdgeToUpdate
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PatchPlanEdgeUpdater:
    """Apply planned edge property updates (EdgeToUpdate) to the database in batches."""

    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
        """
        Args:
            db: Database connection used to run the write queries.
            batch_size_limit: Maximum number of edges updated by a single query.
        """
        self.db = db
        self.batch_size_limit = batch_size_limit

    async def _run_update_query(self, edges_to_update: list[EdgeToUpdate]) -> None:
        """Overwrite the properties of each edge, matched by database id, in one write query."""
        # NOTE: the undirected pattern ()-[e]-() can match a relationship from either
        # side; SET e = props is idempotent, so a repeated match is harmless.
        query = """
        UNWIND $edges_to_update AS edge_to_update
        MATCH ()-[e]-()
        WHERE %(id_func_name)s(e) = edge_to_update.db_id
        SET e = edge_to_update.after_props
        """ % {"id_func_name": self.db.get_id_function_name()}
        await self.db.execute_query(
            query=query, params={"edges_to_update": [asdict(e) for e in edges_to_update]}, type=QueryType.WRITE
        )

    async def execute(self, edges_to_update: list[EdgeToUpdate]) -> None:
        """Run all updates, slicing the input into batches of at most batch_size_limit."""
        for start in range(0, len(edges_to_update), self.batch_size_limit):
            # fix: this slice holds edges, not vertices; renamed from `vertices_slice`
            edges_slice = edges_to_update[start : start + self.batch_size_limit]
            await self._run_update_query(edges_to_update=edges_slice)
|
infrahub/patch/models.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from uuid import uuid4
|
|
3
|
+
|
|
4
|
+
PropertyPrimitives = str | bool | int | float | None
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def str_uuid() -> str:
    """Return a freshly generated random UUID4 rendered as a string."""
    return f"{uuid4()}"
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass
class VertexToAdd:
    """A planned creation of one graph vertex."""

    # Node labels to apply to the new vertex.
    labels: list[str]
    # Properties the vertex will carry once created.
    after_props: dict[str, PropertyPrimitives]
    # Abstract id generated at planning time; once the vertex is created it is
    # mapped to the real database id (see PatchPlan.added_element_db_id_map).
    identifier: str = field(default_factory=str_uuid)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class VertexToUpdate:
    """A planned in-place property update of one existing vertex."""

    # Database-level id of the vertex to update.
    db_id: str
    # Property values prior to the update — presumably retained so the update
    # can be reverted; confirm against the patch runner.
    before_props: dict[str, PropertyPrimitives]
    # Property values the vertex should have after the update.
    after_props: dict[str, PropertyPrimitives]
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
class VertexToDelete:
    """A planned deletion of one existing vertex."""

    # Database-level id of the vertex to delete.
    db_id: str
    # Labels of the vertex at planning time — enough to recreate it on revert.
    labels: list[str]
    # Properties of the vertex at planning time.
    before_props: dict[str, PropertyPrimitives]
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
@dataclass
class EdgeToAdd:
    """A planned creation of one edge between two vertices."""

    # Id of the source vertex; may be a database id or the abstract identifier
    # of a VertexToAdd that has not been created yet.
    from_id: str
    # Id of the destination vertex (same convention as from_id).
    to_id: str
    # Relationship type of the new edge.
    edge_type: str
    # Properties the edge will carry once created.
    after_props: dict[str, PropertyPrimitives]
    # Abstract id generated at planning time; mapped to the real database id
    # after creation (see PatchPlan.added_element_db_id_map).
    identifier: str = field(default_factory=str_uuid)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class EdgeToUpdate:
    """A planned in-place property update of one existing edge."""

    # Database-level id of the edge to update.
    db_id: str
    # Property values prior to the update — presumably retained for reverts;
    # confirm against the patch runner.
    before_props: dict[str, PropertyPrimitives]
    # Property values the edge should have after the update.
    after_props: dict[str, PropertyPrimitives]
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@dataclass
class EdgeToDelete:
    """A planned deletion of one existing edge."""

    # Database-level id of the edge to delete.
    db_id: str
    # Database-level id of the edge's source vertex.
    from_id: str
    # Database-level id of the edge's destination vertex.
    to_id: str
    # Relationship type of the edge.
    edge_type: str
    # Properties of the edge at planning time — enough to recreate it on revert.
    before_props: dict[str, PropertyPrimitives]
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
@dataclass
class PatchPlan:
    """A named set of planned graph changes plus bookkeeping about their application.

    The ``*_to_add/update/delete`` lists describe the intended changes. The other
    fields track state: ``added_element_db_id_map`` maps a planned element's
    abstract identifier to the database id it received, ``deleted_db_ids`` holds
    the database ids that were actually deleted, and
    ``reverted_deleted_db_id_map`` maps previously deleted ids to their
    replacements after a revert.
    """

    name: str
    vertices_to_add: list[VertexToAdd] = field(default_factory=list)
    vertices_to_update: list[VertexToUpdate] = field(default_factory=list)
    vertices_to_delete: list[VertexToDelete] = field(default_factory=list)
    edges_to_add: list[EdgeToAdd] = field(default_factory=list)
    edges_to_update: list[EdgeToUpdate] = field(default_factory=list)
    edges_to_delete: list[EdgeToDelete] = field(default_factory=list)
    added_element_db_id_map: dict[str, str] = field(default_factory=dict)
    deleted_db_ids: set[str] = field(default_factory=set)
    reverted_deleted_db_id_map: dict[str, str] = field(default_factory=dict)

    def get_database_id_for_added_element(self, abstract_id: str) -> str:
        """Resolve an abstract identifier to its database id, falling back to the input."""
        try:
            return self.added_element_db_id_map[abstract_id]
        except KeyError:
            return abstract_id

    def has_element_been_added(self, identifier: str) -> bool:
        """Return True when the element with this identifier was created in the database."""
        return identifier in self.added_element_db_id_map

    @property
    def added_vertices(self) -> list[VertexToAdd]:
        """Planned vertices that have actually been created."""
        return [vertex for vertex in self.vertices_to_add if self.has_element_been_added(vertex.identifier)]

    @property
    def added_edges(self) -> list[EdgeToAdd]:
        """Planned edges that have actually been created."""
        return [edge for edge in self.edges_to_add if self.has_element_been_added(edge.identifier)]

    @property
    def deleted_vertices(self) -> list[VertexToDelete]:
        """Planned vertex deletions that have actually been executed."""
        return [vertex for vertex in self.vertices_to_delete if vertex.db_id in self.deleted_db_ids]

    @property
    def deleted_edges(self) -> list[EdgeToDelete]:
        """Planned edge deletions that have actually been executed."""
        return [edge for edge in self.edges_to_delete if edge.db_id in self.deleted_db_ids]

    def drop_added_db_ids(self, db_ids_to_drop: set[str]) -> None:
        """Forget the given database ids from the added-element bookkeeping."""
        kept_entries = {
            identifier: db_id
            for identifier, db_id in self.added_element_db_id_map.items()
            if db_id not in db_ids_to_drop
        }
        self.added_element_db_id_map = kept_entries

    def drop_deleted_db_ids(self, db_ids_to_drop: set[str]) -> None:
        """Forget the given database ids from the deleted-id bookkeeping."""
        self.deleted_db_ids.difference_update(db_ids_to_drop)
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import Generator
|
|
4
|
+
|
|
5
|
+
from .constants import PatchPlanFilename
|
|
6
|
+
from .models import EdgeToAdd, EdgeToDelete, EdgeToUpdate, PatchPlan, VertexToAdd, VertexToDelete, VertexToUpdate
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PatchPlanReader:
    """Read a PatchPlan back from the directory layout produced by PatchPlanWriter.

    Element files (vertices/edges to add/update/delete) are JSON-lines: one JSON
    object per line. The bookkeeping files (added/deleted/reverted db ids) each
    hold a single JSON document. A missing file is treated as empty.
    """

    def read(self, patch_plan_directory: Path) -> PatchPlan:
        """Load every patch plan file found in the directory into a PatchPlan."""
        vertices_to_add = self._read_vertices_to_add(patch_plan_directory=patch_plan_directory)
        vertices_to_delete = self._read_vertices_to_delete(patch_plan_directory=patch_plan_directory)
        vertices_to_update = self._read_vertices_to_update(patch_plan_directory=patch_plan_directory)
        edges_to_add = self._read_edges_to_add(patch_plan_directory=patch_plan_directory)
        edges_to_delete = self._read_edges_to_delete(patch_plan_directory=patch_plan_directory)
        edges_to_update = self._read_edges_to_update(patch_plan_directory=patch_plan_directory)
        added_node_db_id_map = self._read_added_node_db_id_map(patch_plan_directory=patch_plan_directory)
        deleted_db_ids = self._read_deleted_db_ids(patch_plan_directory=patch_plan_directory)
        reverted_deleted_db_id_map = self._read_reverted_deleted_db_id_map(patch_plan_directory=patch_plan_directory)

        # NOTE(review): the plan name is not persisted on disk, so it cannot be
        # restored here; "none" is a placeholder.
        return PatchPlan(
            name="none",
            vertices_to_add=vertices_to_add,
            vertices_to_delete=vertices_to_delete,
            vertices_to_update=vertices_to_update,
            edges_to_add=edges_to_add,
            edges_to_delete=edges_to_delete,
            edges_to_update=edges_to_update,
            added_element_db_id_map=added_node_db_id_map or {},
            deleted_db_ids=deleted_db_ids or set(),
            reverted_deleted_db_id_map=reverted_deleted_db_id_map or {},
        )

    def _read_file_lines(self, patch_file: Path) -> Generator[str, None, None]:
        """Yield the lines of patch_file, or nothing when the file does not exist."""
        # fix: yield type was annotated `str | None`, but only str lines are yielded
        if not patch_file.exists():
            return
        with patch_file.open() as f:
            yield from f

    def _read_objects(self, patch_plan_directory: Path, filename: PatchPlanFilename, object_class: type) -> list:
        """Deserialize one JSON-lines file into a list of object_class instances."""
        file = patch_plan_directory / Path(filename.value)
        return [
            object_class(**json.loads(raw_line))
            for raw_line in self._read_file_lines(patch_file=file)
            if raw_line
        ]

    def _read_json_file(self, patch_plan_directory: Path, filename: PatchPlanFilename) -> dict | list | None:
        """Parse a single-document JSON file, or return None when it does not exist."""
        file = patch_plan_directory / Path(filename.value)
        if not file.exists():
            return None
        return json.loads(file.read_text())

    def _read_vertices_to_add(self, patch_plan_directory: Path) -> list[VertexToAdd]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.VERTICES_TO_ADD,
            object_class=VertexToAdd,
        )

    def _read_vertices_to_update(self, patch_plan_directory: Path) -> list[VertexToUpdate]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.VERTICES_TO_UPDATE,
            object_class=VertexToUpdate,
        )

    def _read_vertices_to_delete(self, patch_plan_directory: Path) -> list[VertexToDelete]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.VERTICES_TO_DELETE,
            object_class=VertexToDelete,
        )

    def _read_edges_to_add(self, patch_plan_directory: Path) -> list[EdgeToAdd]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.EDGES_TO_ADD,
            object_class=EdgeToAdd,
        )

    def _read_edges_to_delete(self, patch_plan_directory: Path) -> list[EdgeToDelete]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.EDGES_TO_DELETE,
            object_class=EdgeToDelete,
        )

    def _read_edges_to_update(self, patch_plan_directory: Path) -> list[EdgeToUpdate]:
        return self._read_objects(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.EDGES_TO_UPDATE,
            object_class=EdgeToUpdate,
        )

    def _read_added_node_db_id_map(self, patch_plan_directory: Path) -> dict[str, str] | None:
        return self._read_json_file(
            patch_plan_directory=patch_plan_directory, filename=PatchPlanFilename.ADDED_DB_IDS
        )

    def _read_deleted_db_ids(self, patch_plan_directory: Path) -> set[str] | None:
        raw_ids = self._read_json_file(
            patch_plan_directory=patch_plan_directory, filename=PatchPlanFilename.DELETED_DB_IDS
        )
        if raw_ids is None:
            return None
        # stored as a JSON list; restore the set
        return set(raw_ids)

    def _read_reverted_deleted_db_id_map(self, patch_plan_directory: Path) -> dict[str, str] | None:
        return self._read_json_file(
            patch_plan_directory=patch_plan_directory, filename=PatchPlanFilename.REVERTED_DELETED_DB_IDS
        )
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
import json
|
|
2
|
+
from dataclasses import asdict
|
|
3
|
+
from datetime import datetime, timezone
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from .constants import PatchPlanFilename
|
|
8
|
+
from .models import EdgeToAdd, EdgeToDelete, EdgeToUpdate, PatchPlan, VertexToAdd, VertexToDelete, VertexToUpdate
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class PatchPlanWriter:
    """Persist a PatchPlan as a timestamped directory of JSON(-lines) files.

    Each non-empty element list is written to its own JSON-lines file; the
    bookkeeping maps/sets are written as single JSON documents.
    PatchPlanReader performs the inverse operation.
    """

    def write(self, patches_directory: Path, patch_plan: PatchPlan) -> Path:
        """Write patch_plan under patches_directory and return the created directory."""
        timestamp_str = datetime.now(tz=timezone.utc).strftime("%Y%m%d-%H%M%S")
        patch_name = f"patch-{patch_plan.name}-{timestamp_str}"
        patch_plan_directory = patches_directory / Path(patch_name)
        # exist_ok avoids the check-then-create race of the previous exists() test
        patch_plan_directory.mkdir(parents=True, exist_ok=True)
        # Only non-empty groups produce a file; the reader treats a missing file as empty.
        if patch_plan.vertices_to_add:
            self._write_vertices_to_add(
                patch_plan_directory=patch_plan_directory, vertices_to_add=patch_plan.vertices_to_add
            )
        if patch_plan.vertices_to_delete:
            self._write_vertices_to_delete(
                patch_plan_directory=patch_plan_directory, vertices_to_delete=patch_plan.vertices_to_delete
            )
        if patch_plan.vertices_to_update:
            self._write_vertices_to_update(
                patch_plan_directory=patch_plan_directory, vertices_to_update=patch_plan.vertices_to_update
            )
        if patch_plan.edges_to_add:
            self._write_edges_to_add(patch_plan_directory=patch_plan_directory, edges_to_add=patch_plan.edges_to_add)
        if patch_plan.edges_to_delete:
            self._write_edges_to_delete(
                patch_plan_directory=patch_plan_directory, edges_to_delete=patch_plan.edges_to_delete
            )
        if patch_plan.edges_to_update:
            self._write_edges_to_update(
                patch_plan_directory=patch_plan_directory, edges_to_update=patch_plan.edges_to_update
            )

        return patch_plan_directory

    def _write_json_file(self, patch_plan_directory: Path, filename: PatchPlanFilename, payload: Any) -> None:
        """Serialize payload as one newline-terminated JSON document."""
        # open("w")/write_text creates the file, so no separate touch() is needed
        file = patch_plan_directory / Path(filename.value)
        file.write_text(json.dumps(payload) + "\n")

    def write_added_db_id_map(self, patch_plan_directory: Path, db_id_map: dict[str, str]) -> None:
        """Record the abstract-identifier -> database-id map for added elements."""
        self._write_json_file(
            patch_plan_directory=patch_plan_directory, filename=PatchPlanFilename.ADDED_DB_IDS, payload=db_id_map
        )

    def write_deleted_db_ids(self, patch_plan_directory: Path, deleted_ids: set[str]) -> None:
        """Record the database ids that were deleted (stored as a JSON list)."""
        self._write_json_file(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.DELETED_DB_IDS,
            payload=list(deleted_ids),
        )

    def write_reverted_deleted_db_id_map(self, patch_plan_directory: Path, db_id_map: dict[str, str]) -> None:
        """Record the old -> new database-id map for deletions that were reverted."""
        self._write_json_file(
            patch_plan_directory=patch_plan_directory,
            filename=PatchPlanFilename.REVERTED_DELETED_DB_IDS,
            payload=db_id_map,
        )

    def _dataclass_to_json_line(self, dataclass_instance: Any) -> str:
        """Render one dataclass instance as a single JSON line."""
        return json.dumps(asdict(dataclass_instance)) + "\n"

    def _write_to_file(self, file_path: Path, objects: list[Any]) -> None:
        """Write objects to file_path in JSON-lines format, one object per line."""
        with file_path.open(mode="w") as f:
            f.writelines(self._dataclass_to_json_line(obj) for obj in objects)

    def _write_vertices_to_add(self, patch_plan_directory: Path, vertices_to_add: list[VertexToAdd]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.VERTICES_TO_ADD.value)
        self._write_to_file(file_path=file, objects=vertices_to_add)

    def _write_vertices_to_delete(self, patch_plan_directory: Path, vertices_to_delete: list[VertexToDelete]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.VERTICES_TO_DELETE.value)
        self._write_to_file(file_path=file, objects=vertices_to_delete)

    def _write_vertices_to_update(self, patch_plan_directory: Path, vertices_to_update: list[VertexToUpdate]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.VERTICES_TO_UPDATE.value)
        self._write_to_file(file_path=file, objects=vertices_to_update)

    def _write_edges_to_add(self, patch_plan_directory: Path, edges_to_add: list[EdgeToAdd]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.EDGES_TO_ADD.value)
        self._write_to_file(file_path=file, objects=edges_to_add)

    def _write_edges_to_delete(self, patch_plan_directory: Path, edges_to_delete: list[EdgeToDelete]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.EDGES_TO_DELETE.value)
        self._write_to_file(file_path=file, objects=edges_to_delete)

    def _write_edges_to_update(self, patch_plan_directory: Path, edges_to_update: list[EdgeToUpdate]) -> None:
        file = patch_plan_directory / Path(PatchPlanFilename.EDGES_TO_UPDATE.value)
        self._write_to_file(file_path=file, objects=edges_to_update)
|
|
File without changes
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
|
|
3
|
+
from infrahub.database import InfrahubDatabase
|
|
4
|
+
|
|
5
|
+
from ..models import PatchPlan
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class PatchQuery(ABC):
    """Abstract base for a query that computes a PatchPlan of database changes.

    Subclasses implement `plan()` to inspect the database and return the changes
    to apply; the implementations in this package only read from the database
    at planning time.
    """

    def __init__(self, db: InfrahubDatabase):
        self.db = db

    @abstractmethod
    async def plan(self) -> PatchPlan: ...

    # Short identifier for this patch; it becomes the PatchPlan name and hence
    # part of the on-disk patch directory name.
    @property
    @abstractmethod
    def name(self) -> str: ...
|
|
@@ -0,0 +1,109 @@
|
|
|
1
|
+
from ..models import EdgeToAdd, EdgeToDelete, PatchPlan, VertexToDelete
|
|
2
|
+
from .base import PatchQuery
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class ConsolidateDuplicatedNodesPatchQuery(PatchQuery):
    """
    Find groups of nodes that share the same uuid and the same set of labels,
    plan to move all of their edges onto one surviving node of each group, and
    plan to delete the other duplicated nodes. (Properties other than uuid are
    not compared by the query.)
    """

    @property
    def name(self) -> str:
        # Becomes part of the patch plan directory name.
        return "consolidate-duplicated-nodes"

    async def plan(self) -> PatchPlan:
        """Return a PatchPlan that merges each duplicate-node group into one survivor.

        For every group of nodes with the same uuid and sorted label set, the first
        node is kept; for each other node the query returns the node itself (to
        delete), its current edges (to delete), and equivalent edges re-pointed at
        the surviving node (to create).
        """
        query = """
        //------------
        // Find nodes with the same labels and UUID
        //------------
        MATCH (n:Node)
        WITH n.uuid AS node_uuid, count(*) as num_nodes_with_uuid
        WHERE num_nodes_with_uuid > 1
        WITH DISTINCT node_uuid
        MATCH (n:Node {uuid: node_uuid})
        CALL {
            WITH n
            WITH labels(n) AS n_labels
            UNWIND n_labels AS n_label
            WITH n_label
            ORDER BY n_label ASC
            RETURN collect(n_label) AS sorted_labels
        }
        WITH n.uuid AS n_uuid, sorted_labels, collect(n) AS duplicate_nodes
        WHERE size(duplicate_nodes) > 1
        WITH n_uuid, head(duplicate_nodes) AS node_to_keep, tail(duplicate_nodes) AS nodes_to_delete
        UNWIND nodes_to_delete AS node_to_delete
        //------------
        // Find the edges that we need to move to the selected node_to_keep
        //------------
        CALL {
            WITH node_to_keep, node_to_delete
            MATCH (node_to_delete)-[edge_to_delete]->(peer)
            RETURN {
                from_id: %(id_func_name)s(node_to_keep),
                to_id: %(id_func_name)s(peer),
                edge_type: type(edge_to_delete),
                after_props: properties(edge_to_delete)
            } AS edge_to_create
            UNION
            WITH node_to_keep, node_to_delete
            MATCH (node_to_delete)<-[edge_to_delete]-(peer)
            RETURN {
                from_id: %(id_func_name)s(peer),
                to_id: %(id_func_name)s(node_to_keep),
                edge_type: type(edge_to_delete),
                after_props: properties(edge_to_delete)
            } AS edge_to_create
        }
        WITH node_to_delete, collect(edge_to_create) AS edges_to_create
        //------------
        // Find the edges that we need to remove from the duplicated nodes
        //------------
        CALL {
            WITH node_to_delete
            MATCH (node_to_delete)-[e]->(peer)
            RETURN {
                db_id: %(id_func_name)s(e),
                from_id: %(id_func_name)s(node_to_delete),
                to_id: %(id_func_name)s(peer),
                edge_type: type(e),
                before_props: properties(e)
            } AS edge_to_delete
            UNION
            WITH node_to_delete
            MATCH (node_to_delete)<-[e]-(peer)
            RETURN {
                db_id: %(id_func_name)s(e),
                from_id: %(id_func_name)s(peer),
                to_id: %(id_func_name)s(node_to_delete),
                edge_type: type(e),
                before_props: properties(e)
            } AS edge_to_delete
        }
        WITH node_to_delete, edges_to_create, collect(edge_to_delete) AS edges_to_delete
        RETURN
        {db_id: %(id_func_name)s(node_to_delete), labels: labels(node_to_delete), before_props: properties(node_to_delete)} AS vertex_to_delete,
        edges_to_create,
        edges_to_delete
        """ % {"id_func_name": self.db.get_id_function_name()}
        results = await self.db.execute_query(query=query)
        vertices_to_delete: list[VertexToDelete] = []
        edges_to_delete: list[EdgeToDelete] = []
        edges_to_add: list[EdgeToAdd] = []
        # One result row per duplicated node scheduled for deletion.
        for result in results:
            serial_vertex_to_delete = result.get("vertex_to_delete")
            if serial_vertex_to_delete:
                vertex_to_delete = VertexToDelete(**serial_vertex_to_delete)
                vertices_to_delete.append(vertex_to_delete)
            for serial_edge_to_delete in result.get("edges_to_delete"):
                edge_to_delete = EdgeToDelete(**serial_edge_to_delete)
                edges_to_delete.append(edge_to_delete)
            # Replacement edges attached to the surviving node.
            for serial_edge_to_create in result.get("edges_to_create"):
                edges_to_add.append(EdgeToAdd(**serial_edge_to_create))
        return PatchPlan(
            name=self.name,
            vertices_to_delete=vertices_to_delete,
            edges_to_add=edges_to_add,
            edges_to_delete=edges_to_delete,
        )
|
|
@@ -0,0 +1,138 @@
|
|
|
1
|
+
from ..models import EdgeToDelete, EdgeToUpdate, PatchPlan
|
|
2
|
+
from .base import PatchQuery
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
class DeleteDuplicatedEdgesPatchQuery(PatchQuery):
    """
    Find duplicated or overlapping edges of the same status, type, and branch to update and delete
    - one edge will be kept for each pair of nodes and a given status, type, and branch. it will be
    updated to have the earliest "from" and "to" times in this group
    - all the other duplicate/overlapping edges will be deleted
    """

    @property
    def name(self) -> str:
        # Becomes part of the patch plan directory name.
        return "delete-duplicated-edges"

    async def plan(self) -> PatchPlan:
        """Return a PatchPlan that deduplicates edges per (node pair, type, branch).

        For each group the query keeps the edge with the lowest database id per
        status, returning it with `prop_updates` holding corrected from/to times,
        and returns every other edge of the group for deletion.
        """
        query = """
        // ------------
        // Find node pairs that have duplicate edges
        // ------------
        MATCH (node_with_dup_edges:Node)-[edge]->(peer)
        WITH node_with_dup_edges, type(edge) AS edge_type, edge.status AS edge_status, edge.branch AS edge_branch, peer, count(*) AS num_dup_edges
        WHERE num_dup_edges > 1
        WITH DISTINCT node_with_dup_edges, edge_type, edge_branch, peer
        CALL {
            // ------------
            // Get the earliest active and deleted edges for this branch
            // ------------
            WITH node_with_dup_edges, edge_type, edge_branch, peer
            MATCH (node_with_dup_edges)-[active_edge {branch: edge_branch, status: "active"}]->(peer)
            WHERE type(active_edge) = edge_type
            WITH node_with_dup_edges, edge_type, edge_branch, peer, active_edge
            ORDER BY active_edge.from ASC
            WITH node_with_dup_edges, edge_type, edge_branch, peer, head(collect(active_edge.from)) AS active_from
            OPTIONAL MATCH (node_with_dup_edges)-[deleted_edge {branch: edge_branch, status: "deleted"}]->(peer)
            WITH node_with_dup_edges, edge_type, edge_branch, peer, active_from, deleted_edge
            ORDER BY deleted_edge.from ASC
            WITH node_with_dup_edges, edge_type, edge_branch, peer, active_from, head(collect(deleted_edge.from)) AS deleted_from
            // ------------
            // Plan one active edge update with correct from and to times
            // ------------
            CALL {
                WITH node_with_dup_edges, edge_type, edge_branch, peer, active_from, deleted_from
                MATCH (node_with_dup_edges)-[active_e {branch: edge_branch, status: "active"}]->(peer)
                WHERE type(active_e) = edge_type
                WITH node_with_dup_edges, edge_type, edge_branch, peer, active_from, deleted_from, active_e
                ORDER BY %(id_func_name)s(active_e)
                LIMIT 1
                WITH active_e, properties(active_e) AS before_props, {from: active_from, to: deleted_from} AS prop_updates
                RETURN [
                    {
                        db_id: %(id_func_name)s(active_e), before_props: before_props, prop_updates: prop_updates
                    }
                ] AS active_edges_to_update
            }
            // ------------
            // Plan deletes for all the other active edges of this type on this branch
            // ------------
            CALL {
                WITH node_with_dup_edges, edge_type, edge_branch, peer
                MATCH (node_with_dup_edges)-[active_e {branch: edge_branch, status: "active"}]->(peer)
                WHERE type(active_e) = edge_type
                WITH node_with_dup_edges, peer, active_e
                ORDER BY %(id_func_name)s(active_e)
                SKIP 1
                RETURN collect(
                    {
                        db_id: %(id_func_name)s(active_e),
                        from_id: %(id_func_name)s(node_with_dup_edges),
                        to_id: %(id_func_name)s(peer),
                        edge_type: type(active_e),
                        before_props: properties(active_e)
                    }
                ) AS active_edges_to_delete
            }
            // ------------
            // Plan one deleted edge update with correct from time
            // ------------
            CALL {
                WITH node_with_dup_edges, edge_type, edge_branch, peer, deleted_from
                MATCH (node_with_dup_edges)-[deleted_e {branch: edge_branch, status: "deleted"}]->(peer)
                WHERE type(deleted_e) = edge_type
                WITH node_with_dup_edges, edge_type, edge_branch, peer, deleted_from, deleted_e
                ORDER BY %(id_func_name)s(deleted_e)
                LIMIT 1
                WITH deleted_e, properties(deleted_e) AS before_props, {from: deleted_from} AS prop_updates
                RETURN [
                    {
                        db_id: %(id_func_name)s(deleted_e), before_props: before_props, prop_updates: prop_updates
                    }
                ] AS deleted_edges_to_update
            }
            // ------------
            // Plan deletes for all the other deleted edges of this type on this branch
            // ------------
            CALL {
                WITH node_with_dup_edges, edge_type, edge_branch, peer
                MATCH (node_with_dup_edges)-[deleted_e {branch: edge_branch, status: "deleted"}]->(peer)
                WHERE type(deleted_e) = edge_type
                WITH node_with_dup_edges, peer, deleted_e
                ORDER BY %(id_func_name)s(deleted_e)
                SKIP 1
                RETURN collect(
                    {
                        db_id: %(id_func_name)s(deleted_e),
                        from_id: %(id_func_name)s(node_with_dup_edges),
                        to_id: %(id_func_name)s(peer),
                        edge_type: type(deleted_e),
                        before_props: properties(deleted_e)
                    }
                ) AS deleted_edges_to_delete
            }
            RETURN
            active_edges_to_update + deleted_edges_to_update AS edges_to_update,
            active_edges_to_delete + deleted_edges_to_delete AS edges_to_delete
        }
        RETURN edges_to_update, edges_to_delete
        """ % {"id_func_name": self.db.get_id_function_name()}
        results = await self.db.execute_query(query=query)
        edges_to_delete: list[EdgeToDelete] = []
        edges_to_update: list[EdgeToUpdate] = []
        for result in results:
            for serial_edge_to_delete in result.get("edges_to_delete"):
                edge_to_delete = EdgeToDelete(**serial_edge_to_delete)
                edges_to_delete.append(edge_to_delete)
            for serial_edge_to_update in result.get("edges_to_update"):
                # prop_updates comes back as {from, to} or {from}; merge it over the
                # existing properties to build after_props. NOTE(review): the query
                # always returns a non-empty prop_updates map, so the truthiness
                # check never skips — confirm if the query changes.
                prop_updates = serial_edge_to_update["prop_updates"]
                if prop_updates:
                    serial_edge_to_update["after_props"] = serial_edge_to_update["before_props"] | prop_updates
                # prop_updates is not a field of EdgeToUpdate; remove before construction
                del serial_edge_to_update["prop_updates"]
                edge_to_update = EdgeToUpdate(**serial_edge_to_update)
                edges_to_update.append(edge_to_update)
        return PatchPlan(
            name=self.name,
            edges_to_delete=edges_to_delete,
            edges_to_update=edges_to_update,
        )
|