infrahub-server 1.2.5__py3-none-any.whl → 1.2.7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/cli/db.py +2 -0
- infrahub/cli/patch.py +153 -0
- infrahub/computed_attribute/models.py +81 -1
- infrahub/computed_attribute/tasks.py +34 -53
- infrahub/core/manager.py +15 -2
- infrahub/core/node/__init__.py +4 -1
- infrahub/core/query/ipam.py +7 -5
- infrahub/core/registry.py +2 -3
- infrahub/core/schema/schema_branch.py +34 -37
- infrahub/database/__init__.py +2 -0
- infrahub/graphql/manager.py +10 -0
- infrahub/graphql/mutations/main.py +4 -5
- infrahub/graphql/mutations/resource_manager.py +3 -3
- infrahub/patch/__init__.py +0 -0
- infrahub/patch/constants.py +13 -0
- infrahub/patch/edge_adder.py +64 -0
- infrahub/patch/edge_deleter.py +33 -0
- infrahub/patch/edge_updater.py +28 -0
- infrahub/patch/models.py +98 -0
- infrahub/patch/plan_reader.py +107 -0
- infrahub/patch/plan_writer.py +92 -0
- infrahub/patch/queries/__init__.py +0 -0
- infrahub/patch/queries/base.py +17 -0
- infrahub/patch/runner.py +254 -0
- infrahub/patch/vertex_adder.py +61 -0
- infrahub/patch/vertex_deleter.py +33 -0
- infrahub/patch/vertex_updater.py +28 -0
- infrahub/tasks/registry.py +4 -1
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/ctl/cli_commands.py +2 -2
- infrahub_sdk/ctl/menu.py +56 -13
- infrahub_sdk/ctl/object.py +55 -5
- infrahub_sdk/ctl/utils.py +22 -1
- infrahub_sdk/exceptions.py +19 -1
- infrahub_sdk/node.py +42 -26
- infrahub_sdk/protocols_generator/__init__.py +0 -0
- infrahub_sdk/protocols_generator/constants.py +28 -0
- infrahub_sdk/{code_generator.py → protocols_generator/generator.py} +47 -34
- infrahub_sdk/protocols_generator/template.j2 +114 -0
- infrahub_sdk/schema/__init__.py +110 -74
- infrahub_sdk/schema/main.py +36 -2
- infrahub_sdk/schema/repository.py +2 -0
- infrahub_sdk/spec/menu.py +3 -3
- infrahub_sdk/spec/object.py +522 -41
- infrahub_sdk/testing/docker.py +4 -5
- infrahub_sdk/testing/schemas/animal.py +7 -0
- infrahub_sdk/yaml.py +63 -7
- {infrahub_server-1.2.5.dist-info → infrahub_server-1.2.7.dist-info}/METADATA +1 -1
- {infrahub_server-1.2.5.dist-info → infrahub_server-1.2.7.dist-info}/RECORD +56 -39
- infrahub_testcontainers/container.py +52 -2
- infrahub_testcontainers/docker-compose.test.yml +27 -0
- infrahub_testcontainers/performance_test.py +1 -1
- infrahub_testcontainers/plugin.py +1 -1
- infrahub_sdk/ctl/constants.py +0 -115
- {infrahub_server-1.2.5.dist-info → infrahub_server-1.2.7.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.5.dist-info → infrahub_server-1.2.7.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.5.dist-info → infrahub_server-1.2.7.dist-info}/entry_points.txt +0 -0
infrahub/patch/runner.py
ADDED
|
@@ -0,0 +1,254 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
from .edge_adder import PatchPlanEdgeAdder
|
|
4
|
+
from .edge_deleter import PatchPlanEdgeDeleter
|
|
5
|
+
from .edge_updater import PatchPlanEdgeUpdater
|
|
6
|
+
from .models import EdgeToAdd, EdgeToDelete, EdgeToUpdate, PatchPlan, VertexToAdd, VertexToDelete, VertexToUpdate
|
|
7
|
+
from .plan_reader import PatchPlanReader
|
|
8
|
+
from .plan_writer import PatchPlanWriter
|
|
9
|
+
from .queries.base import PatchQuery
|
|
10
|
+
from .vertex_adder import PatchPlanVertexAdder
|
|
11
|
+
from .vertex_deleter import PatchPlanVertexDeleter
|
|
12
|
+
from .vertex_updater import PatchPlanVertexUpdater
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
class PatchPlanEdgeDbIdTranslator:
    """Rewrites the endpoints of planned edges from abstract IDs to database-level IDs."""

    def translate_to_db_ids(self, patch_plan: PatchPlan) -> None:
        """Replace each planned edge's from/to ID with the database ID of the element it refers to."""
        translate = patch_plan.get_database_id_for_added_element
        for edge in patch_plan.edges_to_add:
            edge.from_id = translate(abstract_id=edge.from_id)
            edge.to_id = translate(abstract_id=edge.to_id)
class PatchRunner:
    """Orchestrates applying and reverting a PatchPlan against the database.

    Each kind of change (vertex/edge add, update, delete) is delegated to a dedicated
    collaborator. After every mutating step the runner persists its progress (added-ID
    maps, deleted-ID sets) through the plan writer, so a partially applied patch can be
    re-run without double-applying, and so a revert knows exactly what to undo.
    """

    def __init__(
        self,
        plan_writer: PatchPlanWriter,
        plan_reader: PatchPlanReader,
        edge_db_id_translator: PatchPlanEdgeDbIdTranslator,
        vertex_adder: PatchPlanVertexAdder,
        vertex_updater: PatchPlanVertexUpdater,
        vertex_deleter: PatchPlanVertexDeleter,
        edge_adder: PatchPlanEdgeAdder,
        edge_updater: PatchPlanEdgeUpdater,
        edge_deleter: PatchPlanEdgeDeleter,
    ) -> None:
        self.plan_writer = plan_writer
        self.plan_reader = plan_reader
        self.edge_db_id_translator = edge_db_id_translator
        self.vertex_adder = vertex_adder
        self.vertex_updater = vertex_updater
        self.vertex_deleter = vertex_deleter
        self.edge_adder = edge_adder
        self.edge_updater = edge_updater
        self.edge_deleter = edge_deleter

    async def prepare_plan(self, patch_query: PatchQuery, directory: Path) -> Path:
        """Build the PatchPlan for `patch_query` and serialize it under `directory`.

        Returns the path the plan was written to.
        """
        patch_plan = await patch_query.plan()
        return self.plan_writer.write(patches_directory=directory, patch_plan=patch_plan)

    async def apply(self, patch_plan_directory: Path) -> PatchPlan:
        """Read the plan from disk and apply it.

        Order: add vertices, add edges, update vertices, delete edges, delete
        vertices, update edges. Returns the (mutated) PatchPlan.
        """
        patch_plan = self.plan_reader.read(patch_plan_directory)
        # vertices first so that new edges can reference the freshly created vertices
        await self._apply_vertices_to_add(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        await self._apply_edges_to_add(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        if patch_plan.vertices_to_update:
            await self.vertex_updater.execute(vertices_to_update=patch_plan.vertices_to_update)
        # edges are removed before their vertices
        await self._apply_edges_to_delete(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        await self._apply_vertices_to_delete(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        if patch_plan.edges_to_update:
            await self.edge_updater.execute(edges_to_update=patch_plan.edges_to_update)
        return patch_plan

    async def _apply_vertices_to_add(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Create the planned vertices, skipping any already recorded as added (safe to re-run)."""
        if not patch_plan.vertices_to_add:
            return
        unadded_vertices = [
            v for v in patch_plan.vertices_to_add if not patch_plan.has_element_been_added(v.identifier)
        ]
        try:
            async for added_element_id_map in self.vertex_adder.execute(vertices_to_add=unadded_vertices):
                patch_plan.added_element_db_id_map.update(added_element_id_map)
        finally:
            # record the added elements so that we do not double-add them if the patch is run again
            self.plan_writer.write_added_db_id_map(
                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
            )

    async def _apply_edges_to_add(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Create the planned edges, translating their endpoint abstract IDs to database IDs first."""
        if not patch_plan.edges_to_add:
            return
        self.edge_db_id_translator.translate_to_db_ids(patch_plan=patch_plan)
        unadded_edges = [e for e in patch_plan.edges_to_add if not patch_plan.has_element_been_added(e.identifier)]
        try:
            async for added_element_id_map in self.edge_adder.execute(edges_to_add=unadded_edges):
                patch_plan.added_element_db_id_map.update(added_element_id_map)
        finally:
            # record the added elements so that we do not double-add them if the patch is run again
            self.plan_writer.write_added_db_id_map(
                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
            )

    async def _apply_vertices_to_delete(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Delete the planned vertices, persisting the deleted IDs after each batch."""
        if not patch_plan.vertices_to_delete:
            return
        try:
            async for deleted_ids in self.vertex_deleter.execute(vertices_to_delete=patch_plan.vertices_to_delete):
                patch_plan.deleted_db_ids |= deleted_ids
        finally:
            # record the deleted elements so that we know what to add if the patch is reverted
            self.plan_writer.write_deleted_db_ids(
                patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
            )

    async def _apply_edges_to_delete(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Delete the planned edges, persisting the deleted IDs after each batch."""
        if not patch_plan.edges_to_delete:
            return
        try:
            async for deleted_ids in self.edge_deleter.execute(edges_to_delete=patch_plan.edges_to_delete):
                patch_plan.deleted_db_ids |= deleted_ids
        finally:
            # record the deleted elements so that we know what to add if the patch is reverted
            self.plan_writer.write_deleted_db_ids(
                patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
            )

    async def revert(self, patch_plan_directory: Path) -> PatchPlan:
        """Invert the PatchPlan to create the complement of every added/updated/deleted element and undo them

        Order: re-create deleted vertices, re-create deleted edges, delete added
        edges, delete added vertices, then invert the vertex/edge property updates.
        """
        patch_plan = self.plan_reader.read(patch_plan_directory)
        # re-create deleted vertices first so that re-created edges can point at them
        await self._revert_deleted_vertices(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        await self._revert_deleted_edges(
            patch_plan=patch_plan,
            patch_plan_directory=patch_plan_directory,
        )
        await self._revert_added_edges(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        await self._revert_added_vertices(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
        # invert each recorded update by swapping its before/after property sets
        vertices_to_update = [
            VertexToUpdate(
                db_id=vertex_update_to_revert.db_id,
                before_props=vertex_update_to_revert.after_props,
                after_props=vertex_update_to_revert.before_props,
            )
            for vertex_update_to_revert in patch_plan.vertices_to_update
        ]
        if vertices_to_update:
            await self.vertex_updater.execute(vertices_to_update=vertices_to_update)

        edges_to_update = [
            EdgeToUpdate(
                db_id=edge_update_to_revert.db_id,
                before_props=edge_update_to_revert.after_props,
                after_props=edge_update_to_revert.before_props,
            )
            for edge_update_to_revert in patch_plan.edges_to_update
        ]
        if edges_to_update:
            await self.edge_updater.execute(edges_to_update=edges_to_update)
        # the revert is complete: clear the deleted->recreated ID mapping and persist the reset
        if patch_plan.reverted_deleted_db_id_map:
            patch_plan.reverted_deleted_db_id_map = {}
            self.plan_writer.write_reverted_deleted_db_id_map(
                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.reverted_deleted_db_id_map
            )
        return patch_plan

    async def _revert_added_vertices(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Delete vertices that were created by apply(), looked up through the recorded ID map."""
        vertices_to_delete = [
            VertexToDelete(
                db_id=patch_plan.get_database_id_for_added_element(abstract_id=vertex_add_to_revert.identifier),
                labels=vertex_add_to_revert.labels,
                before_props=vertex_add_to_revert.after_props,
            )
            for vertex_add_to_revert in patch_plan.added_vertices
        ]
        if not vertices_to_delete:
            return
        all_deleted_ids: set[str] = set()
        try:
            async for deleted_ids in self.vertex_deleter.execute(vertices_to_delete=vertices_to_delete):
                all_deleted_ids |= deleted_ids
        finally:
            # forget the deleted IDs so a later apply() will create these vertices again
            if all_deleted_ids:
                patch_plan.drop_added_db_ids(db_ids_to_drop=all_deleted_ids)
                self.plan_writer.write_added_db_id_map(
                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
                )

    async def _revert_deleted_vertices(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Re-create vertices deleted by apply(), using each old database ID as the abstract identifier."""
        vertices_to_add = [
            VertexToAdd(
                labels=vertex_delete_to_revert.labels,
                after_props=vertex_delete_to_revert.before_props,
                identifier=vertex_delete_to_revert.db_id,
            )
            for vertex_delete_to_revert in patch_plan.deleted_vertices
        ]
        if not vertices_to_add:
            return

        deleted_to_undeleted_db_id_map: dict[str, str] = {}
        try:
            async for added_db_id_map in self.vertex_adder.execute(vertices_to_add=vertices_to_add):
                deleted_to_undeleted_db_id_map.update(added_db_id_map)
        finally:
            if deleted_to_undeleted_db_id_map:
                patch_plan.drop_deleted_db_ids(db_ids_to_drop=set(deleted_to_undeleted_db_id_map.keys()))
                self.plan_writer.write_deleted_db_ids(
                    patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
                )
                # keep the old->new ID mapping so _revert_deleted_edges can re-point its endpoints
                patch_plan.reverted_deleted_db_id_map.update(deleted_to_undeleted_db_id_map)
                self.plan_writer.write_reverted_deleted_db_id_map(
                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.reverted_deleted_db_id_map
                )

    async def _revert_added_edges(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Delete edges that were created by apply(), looked up through the recorded ID map."""
        edges_to_delete = [
            EdgeToDelete(
                db_id=patch_plan.get_database_id_for_added_element(abstract_id=edge_add_to_revert.identifier),
                from_id=edge_add_to_revert.from_id,
                to_id=edge_add_to_revert.to_id,
                edge_type=edge_add_to_revert.edge_type,
                before_props=edge_add_to_revert.after_props,
            )
            for edge_add_to_revert in patch_plan.added_edges
        ]
        if not edges_to_delete:
            return
        all_deleted_ids: set[str] = set()
        try:
            async for deleted_ids in self.edge_deleter.execute(edges_to_delete=edges_to_delete):
                all_deleted_ids |= deleted_ids
        finally:
            # forget the deleted IDs so a later apply() will create these edges again
            if all_deleted_ids:
                patch_plan.drop_added_db_ids(db_ids_to_drop=all_deleted_ids)
                self.plan_writer.write_added_db_id_map(
                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
                )

    async def _revert_deleted_edges(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
        """Re-create edges deleted by apply().

        Endpoint IDs are mapped through reverted_deleted_db_id_map because re-created
        vertices received new database IDs; IDs not in the map are used unchanged.
        """
        edges_to_add = [
            EdgeToAdd(
                identifier=edge_delete_to_revert.db_id,
                from_id=patch_plan.reverted_deleted_db_id_map.get(
                    edge_delete_to_revert.from_id, edge_delete_to_revert.from_id
                ),
                to_id=patch_plan.reverted_deleted_db_id_map.get(
                    edge_delete_to_revert.to_id, edge_delete_to_revert.to_id
                ),
                edge_type=edge_delete_to_revert.edge_type,
                after_props=edge_delete_to_revert.before_props,
            )
            for edge_delete_to_revert in patch_plan.deleted_edges
        ]
        if not edges_to_add:
            return

        undeleted_ids: set[str] = set()
        try:
            async for added_db_id_map in self.edge_adder.execute(edges_to_add=edges_to_add):
                undeleted_ids |= set(added_db_id_map.keys())
        finally:
            if undeleted_ids:
                patch_plan.drop_deleted_db_ids(db_ids_to_drop=undeleted_ids)
                self.plan_writer.write_deleted_db_ids(
                    patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
                )
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from collections import defaultdict
|
|
2
|
+
from dataclasses import asdict
|
|
3
|
+
from typing import AsyncGenerator
|
|
4
|
+
|
|
5
|
+
from infrahub.core.query import QueryType
|
|
6
|
+
from infrahub.database import InfrahubDatabase
|
|
7
|
+
|
|
8
|
+
from .models import VertexToAdd
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class PatchPlanVertexAdder:
    """Creates planned vertices in the database, batched by shared label set."""

    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
        self.db = db
        # maximum number of vertices created in a single query
        self.batch_size_limit = batch_size_limit

    async def _run_add_query(self, labels: list[str], vertices_to_add: list[VertexToAdd]) -> dict[str, str]:
        """Create one batch of vertices that all share the same labels.

        Returns a map of VertexToAdd.identifier -> database-level ID of the created vertex.
        """
        labels_str = ":".join(labels)
        serial_vertices_to_add: list[dict[str, str | int | bool]] = [asdict(v) for v in vertices_to_add]
        query = """
        UNWIND $vertices_to_add AS vertex_to_add
        CREATE (v:%(labels)s)
        SET v = vertex_to_add.after_props
        RETURN vertex_to_add.identifier AS abstract_id, %(id_func_name)s(v) AS db_id
        """ % {
            "labels": labels_str,
            "id_func_name": self.db.get_id_function_name(),
        }
        # start the transaction before entering the try block: if start_transaction() itself
        # fails, referencing txn_db in the finally clause would otherwise raise an
        # UnboundLocalError that masks the original exception
        txn_db = self.db.start_transaction()
        # use transaction to make sure we record the results before committing them
        try:
            async with txn_db as txn:
                results = await txn.execute_query(
                    query=query, params={"vertices_to_add": serial_vertices_to_add}, type=QueryType.WRITE
                )
                abstract_to_concrete_id_map: dict[str, str] = {}
                for result in results:
                    abstract_id = result.get("abstract_id")
                    concrete_id = result.get("db_id")
                    abstract_to_concrete_id_map[abstract_id] = concrete_id
        finally:
            await txn_db.close()
        return abstract_to_concrete_id_map

    async def execute(self, vertices_to_add: list[VertexToAdd]) -> AsyncGenerator[dict[str, str], None]:
        """
        Create vertices_to_add on the database.
        Returns a generator that yields dictionaries mapping VertexToAdd.identifier to the database-level ID of the newly created vertex.
        """
        vertices_map_queue: dict[frozenset[str], list[VertexToAdd]] = defaultdict(list)
        for vertex_to_add in vertices_to_add:
            frozen_labels = frozenset(vertex_to_add.labels)
            vertices_map_queue[frozen_labels].append(vertex_to_add)
            # flush at >= so that no batch exceeds batch_size_limit; the previous `>`
            # allowed batches of batch_size_limit + 1, inconsistent with the deleter/updater
            if len(vertices_map_queue[frozen_labels]) >= self.batch_size_limit:
                yield await self._run_add_query(
                    labels=list(frozen_labels),
                    vertices_to_add=vertices_map_queue[frozen_labels],
                )
                vertices_map_queue[frozen_labels] = []

        # flush the remaining partial batches, skipping label groups that were already fully flushed
        for frozen_labels, vertices_group in vertices_map_queue.items():
            if vertices_group:
                yield await self._run_add_query(labels=list(frozen_labels), vertices_to_add=vertices_group)
|
@@ -0,0 +1,33 @@
|
|
|
1
|
+
from typing import AsyncGenerator
|
|
2
|
+
|
|
3
|
+
from infrahub.core.query import QueryType
|
|
4
|
+
from infrahub.database import InfrahubDatabase
|
|
5
|
+
|
|
6
|
+
from .models import VertexToDelete
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PatchPlanVertexDeleter:
    """Deletes vertices from the database in fixed-size batches."""

    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
        self.db = db
        # maximum number of vertices deleted in a single query
        self.batch_size_limit = batch_size_limit

    async def _run_delete_query(self, ids_to_delete: list[str]) -> set[str]:
        """Detach-delete one batch of vertices by database ID and return the IDs actually deleted."""
        query = """
        MATCH (n)
        WHERE %(id_func_name)s(n) IN $ids_to_delete
        DETACH DELETE n
        RETURN %(id_func_name)s(n) AS deleted_id
        """ % {"id_func_name": self.db.get_id_function_name()}
        results = await self.db.execute_query(
            query=query, params={"ids_to_delete": ids_to_delete}, type=QueryType.WRITE
        )
        return {result.get("deleted_id") for result in results}

    async def execute(self, vertices_to_delete: list[VertexToDelete]) -> AsyncGenerator[set[str], None]:
        """Delete the given vertices, yielding the set of deleted database IDs after each batch."""
        limit = self.batch_size_limit
        for start in range(0, len(vertices_to_delete), limit):
            batch = vertices_to_delete[start : start + limit]
            yield await self._run_delete_query(ids_to_delete=[vertex.db_id for vertex in batch])
@@ -0,0 +1,28 @@
|
|
|
1
|
+
from dataclasses import asdict
|
|
2
|
+
|
|
3
|
+
from infrahub.core.query import QueryType
|
|
4
|
+
from infrahub.database import InfrahubDatabase
|
|
5
|
+
|
|
6
|
+
from .models import VertexToUpdate
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class PatchPlanVertexUpdater:
    """Overwrites vertex properties in the database in fixed-size batches."""

    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
        self.db = db
        # maximum number of vertices updated in a single query
        self.batch_size_limit = batch_size_limit

    async def _run_update_query(self, vertices_to_update: list[VertexToUpdate]) -> None:
        """Apply one batch of property overwrites, matching each vertex by database ID."""
        query = """
        UNWIND $vertices_to_update AS vertex_to_update
        MATCH (n)
        WHERE %(id_func_name)s(n) = vertex_to_update.db_id
        SET n = vertex_to_update.after_props
        """ % {"id_func_name": self.db.get_id_function_name()}
        serialized = [asdict(vertex) for vertex in vertices_to_update]
        await self.db.execute_query(
            query=query, params={"vertices_to_update": serialized}, type=QueryType.WRITE
        )

    async def execute(self, vertices_to_update: list[VertexToUpdate]) -> None:
        """Update the given vertices, issuing one query per batch of at most batch_size_limit."""
        limit = self.batch_size_limit
        for start in range(0, len(vertices_to_update), limit):
            await self._run_update_query(vertices_to_update=vertices_to_update[start : start + limit])
infrahub/tasks/registry.py
CHANGED
|
@@ -61,5 +61,8 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
|
|
|
61
61
|
)
|
|
62
62
|
|
|
63
63
|
purged_branches = await registry.purge_inactive_branches(db=db, active_branches=branches)
|
|
64
|
-
|
|
64
|
+
purged_branches.update(
|
|
65
|
+
GraphQLSchemaManager.purge_inactive(active_branches=[branch.name for branch in branches])
|
|
66
|
+
)
|
|
67
|
+
for branch_name in sorted(purged_branches):
|
|
65
68
|
log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)
|
infrahub_sdk/checks.py
CHANGED
|
@@ -83,7 +83,7 @@ class InfrahubCheck:
|
|
|
83
83
|
async def init(cls, client: InfrahubClient | None = None, *args: Any, **kwargs: Any) -> InfrahubCheck:
|
|
84
84
|
"""Async init method, If an existing InfrahubClient client hasn't been provided, one will be created automatically."""
|
|
85
85
|
warnings.warn(
|
|
86
|
-
"InfrahubCheck.init has been deprecated and will be removed in
|
|
86
|
+
"InfrahubCheck.init has been deprecated and will be removed in version 2.0.0 of the Infrahub Python SDK",
|
|
87
87
|
DeprecationWarning,
|
|
88
88
|
stacklevel=1,
|
|
89
89
|
)
|
infrahub_sdk/ctl/cli_commands.py
CHANGED
|
@@ -20,7 +20,6 @@ from rich.table import Table
|
|
|
20
20
|
|
|
21
21
|
from .. import __version__ as sdk_version
|
|
22
22
|
from ..async_typer import AsyncTyper
|
|
23
|
-
from ..code_generator import CodeGenerator
|
|
24
23
|
from ..ctl import config
|
|
25
24
|
from ..ctl.branch import app as branch_app
|
|
26
25
|
from ..ctl.check import run as run_check
|
|
@@ -42,6 +41,7 @@ from ..ctl.utils import (
|
|
|
42
41
|
)
|
|
43
42
|
from ..ctl.validate import app as validate_app
|
|
44
43
|
from ..exceptions import GraphQLError, ModuleImportError
|
|
44
|
+
from ..protocols_generator.generator import CodeGenerator
|
|
45
45
|
from ..schema import MainSchemaTypesAll, SchemaRoot
|
|
46
46
|
from ..template import Jinja2Template
|
|
47
47
|
from ..template.exceptions import JinjaTemplateError
|
|
@@ -61,7 +61,7 @@ app.add_typer(schema_app, name="schema")
|
|
|
61
61
|
app.add_typer(validate_app, name="validate")
|
|
62
62
|
app.add_typer(repository_app, name="repository")
|
|
63
63
|
app.add_typer(menu_app, name="menu")
|
|
64
|
-
app.add_typer(object_app, name="object"
|
|
64
|
+
app.add_typer(object_app, name="object")
|
|
65
65
|
|
|
66
66
|
app.command(name="dump")(dump)
|
|
67
67
|
app.command(name="load")(load)
|
infrahub_sdk/ctl/menu.py
CHANGED
|
@@ -7,9 +7,14 @@ from rich.console import Console
|
|
|
7
7
|
from ..async_typer import AsyncTyper
|
|
8
8
|
from ..ctl.client import initialize_client
|
|
9
9
|
from ..ctl.utils import catch_exception, init_logging
|
|
10
|
+
from ..exceptions import ObjectValidationError, ValidationError
|
|
10
11
|
from ..spec.menu import MenuFile
|
|
11
12
|
from .parameters import CONFIG_PARAM
|
|
12
|
-
from .utils import
|
|
13
|
+
from .utils import (
|
|
14
|
+
display_object_validate_format_error,
|
|
15
|
+
display_object_validate_format_success,
|
|
16
|
+
load_yamlfile_from_disk_and_exit,
|
|
17
|
+
)
|
|
13
18
|
|
|
14
19
|
app = AsyncTyper()
|
|
15
20
|
console = Console()
|
|
@@ -39,16 +44,54 @@ async def load(
|
|
|
39
44
|
files = load_yamlfile_from_disk_and_exit(paths=menus, file_type=MenuFile, console=console)
|
|
40
45
|
client = initialize_client()
|
|
41
46
|
|
|
47
|
+
has_errors = False
|
|
48
|
+
|
|
49
|
+
for file in files:
|
|
50
|
+
try:
|
|
51
|
+
await file.validate_format(client=client, branch=branch)
|
|
52
|
+
except ValidationError as exc:
|
|
53
|
+
has_errors = True
|
|
54
|
+
display_object_validate_format_error(file=file, error=exc, console=console)
|
|
55
|
+
|
|
56
|
+
if has_errors:
|
|
57
|
+
raise typer.Exit(1)
|
|
58
|
+
|
|
42
59
|
for file in files:
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
60
|
+
try:
|
|
61
|
+
await file.process(client=client, branch=branch)
|
|
62
|
+
except ObjectValidationError as exc:
|
|
63
|
+
has_errors = True
|
|
64
|
+
console.print(f"[red] {exc!s}")
|
|
65
|
+
|
|
66
|
+
if has_errors:
|
|
67
|
+
raise typer.Exit(1)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@app.command()
|
|
71
|
+
@catch_exception(console=console)
|
|
72
|
+
async def validate(
|
|
73
|
+
paths: list[Path],
|
|
74
|
+
debug: bool = False,
|
|
75
|
+
branch: str = typer.Option(None, help="Branch on which to validate the objects."),
|
|
76
|
+
_: str = CONFIG_PARAM,
|
|
77
|
+
) -> None:
|
|
78
|
+
"""Validate one or multiple menu files."""
|
|
79
|
+
|
|
80
|
+
init_logging(debug=debug)
|
|
81
|
+
|
|
82
|
+
logging.getLogger("infrahub_sdk").setLevel(logging.INFO)
|
|
83
|
+
|
|
84
|
+
files = load_yamlfile_from_disk_and_exit(paths=paths, file_type=MenuFile, console=console)
|
|
85
|
+
client = initialize_client()
|
|
86
|
+
|
|
87
|
+
has_errors = False
|
|
88
|
+
for file in files:
|
|
89
|
+
try:
|
|
90
|
+
await file.validate_format(client=client, branch=branch)
|
|
91
|
+
display_object_validate_format_success(file=file, console=console)
|
|
92
|
+
except ValidationError as exc:
|
|
93
|
+
has_errors = True
|
|
94
|
+
display_object_validate_format_error(file=file, error=exc, console=console)
|
|
95
|
+
|
|
96
|
+
if has_errors:
|
|
97
|
+
raise typer.Exit(1)
|
infrahub_sdk/ctl/object.py
CHANGED
|
@@ -7,9 +7,14 @@ from rich.console import Console
|
|
|
7
7
|
from ..async_typer import AsyncTyper
|
|
8
8
|
from ..ctl.client import initialize_client
|
|
9
9
|
from ..ctl.utils import catch_exception, init_logging
|
|
10
|
+
from ..exceptions import ObjectValidationError, ValidationError
|
|
10
11
|
from ..spec.object import ObjectFile
|
|
11
12
|
from .parameters import CONFIG_PARAM
|
|
12
|
-
from .utils import
|
|
13
|
+
from .utils import (
|
|
14
|
+
display_object_validate_format_error,
|
|
15
|
+
display_object_validate_format_success,
|
|
16
|
+
load_yamlfile_from_disk_and_exit,
|
|
17
|
+
)
|
|
13
18
|
|
|
14
19
|
app = AsyncTyper()
|
|
15
20
|
console = Console()
|
|
@@ -39,9 +44,54 @@ async def load(
|
|
|
39
44
|
files = load_yamlfile_from_disk_and_exit(paths=paths, file_type=ObjectFile, console=console)
|
|
40
45
|
client = initialize_client()
|
|
41
46
|
|
|
47
|
+
has_errors = False
|
|
48
|
+
|
|
49
|
+
for file in files:
|
|
50
|
+
try:
|
|
51
|
+
await file.validate_format(client=client, branch=branch)
|
|
52
|
+
except ValidationError as exc:
|
|
53
|
+
has_errors = True
|
|
54
|
+
display_object_validate_format_error(file=file, error=exc, console=console)
|
|
55
|
+
|
|
56
|
+
if has_errors:
|
|
57
|
+
raise typer.Exit(1)
|
|
58
|
+
|
|
59
|
+
for file in files:
|
|
60
|
+
try:
|
|
61
|
+
await file.process(client=client, branch=branch)
|
|
62
|
+
except ObjectValidationError as exc:
|
|
63
|
+
has_errors = True
|
|
64
|
+
console.print(f"[red] {exc!s}")
|
|
65
|
+
|
|
66
|
+
if has_errors:
|
|
67
|
+
raise typer.Exit(1)
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
@app.command()
|
|
71
|
+
@catch_exception(console=console)
|
|
72
|
+
async def validate(
|
|
73
|
+
paths: list[Path],
|
|
74
|
+
debug: bool = False,
|
|
75
|
+
branch: str = typer.Option(None, help="Branch on which to validate the objects."),
|
|
76
|
+
_: str = CONFIG_PARAM,
|
|
77
|
+
) -> None:
|
|
78
|
+
"""Validate one or multiple objects files."""
|
|
79
|
+
|
|
80
|
+
init_logging(debug=debug)
|
|
81
|
+
|
|
82
|
+
logging.getLogger("infrahub_sdk").setLevel(logging.INFO)
|
|
83
|
+
|
|
84
|
+
files = load_yamlfile_from_disk_and_exit(paths=paths, file_type=ObjectFile, console=console)
|
|
85
|
+
client = initialize_client()
|
|
86
|
+
|
|
87
|
+
has_errors = False
|
|
42
88
|
for file in files:
|
|
43
|
-
|
|
44
|
-
|
|
89
|
+
try:
|
|
90
|
+
await file.validate_format(client=client, branch=branch)
|
|
91
|
+
display_object_validate_format_success(file=file, console=console)
|
|
92
|
+
except ValidationError as exc:
|
|
93
|
+
has_errors = True
|
|
94
|
+
display_object_validate_format_error(file=file, error=exc, console=console)
|
|
45
95
|
|
|
46
|
-
|
|
47
|
-
|
|
96
|
+
if has_errors:
|
|
97
|
+
raise typer.Exit(1)
|
infrahub_sdk/ctl/utils.py
CHANGED
|
@@ -25,6 +25,7 @@ from ..exceptions import (
|
|
|
25
25
|
SchemaNotFoundError,
|
|
26
26
|
ServerNotReachableError,
|
|
27
27
|
ServerNotResponsiveError,
|
|
28
|
+
ValidationError,
|
|
28
29
|
)
|
|
29
30
|
from ..yaml import YamlFile
|
|
30
31
|
from .client import initialize_client_sync
|
|
@@ -32,6 +33,7 @@ from .exceptions import QueryNotFoundError
|
|
|
32
33
|
|
|
33
34
|
if TYPE_CHECKING:
|
|
34
35
|
from ..schema.repository import InfrahubRepositoryConfig
|
|
36
|
+
from ..spec.object import ObjectFile
|
|
35
37
|
|
|
36
38
|
YamlFileVar = TypeVar("YamlFileVar", bound=YamlFile)
|
|
37
39
|
T = TypeVar("T")
|
|
@@ -198,4 +200,23 @@ def load_yamlfile_from_disk_and_exit(
|
|
|
198
200
|
if has_error:
|
|
199
201
|
raise typer.Exit(1)
|
|
200
202
|
|
|
201
|
-
return data_files
|
|
203
|
+
return sorted(data_files, key=lambda x: x.location)
|
|
204
|
+
|
|
205
|
+
|
|
206
|
+
def display_object_validate_format_success(file: ObjectFile, console: Console) -> None:
|
|
207
|
+
if file.multiple_documents:
|
|
208
|
+
console.print(f"[green] File '{file.location}' [{file.document_position}] is Valid!")
|
|
209
|
+
else:
|
|
210
|
+
console.print(f"[green] File '{file.location}' is Valid!")
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def display_object_validate_format_error(file: ObjectFile, error: ValidationError, console: Console) -> None:
|
|
214
|
+
if file.multiple_documents:
|
|
215
|
+
console.print(f"[red] File '{file.location}' [{file.document_position}] is not valid!")
|
|
216
|
+
else:
|
|
217
|
+
console.print(f"[red] File '{file.location}' is not valid!")
|
|
218
|
+
if error.messages:
|
|
219
|
+
for message in error.messages:
|
|
220
|
+
console.print(f"[red] {message}")
|
|
221
|
+
else:
|
|
222
|
+
console.print(f"[red] {error.message}")
|
infrahub_sdk/exceptions.py
CHANGED
|
@@ -113,11 +113,29 @@ class InfrahubTransformNotFoundError(Error):
|
|
|
113
113
|
|
|
114
114
|
|
|
115
115
|
class ValidationError(Error):
|
|
116
|
-
def __init__(self, identifier: str, message: str):
|
|
116
|
+
def __init__(self, identifier: str, message: str | None = None, messages: list[str] | None = None):
|
|
117
117
|
self.identifier = identifier
|
|
118
118
|
self.message = message
|
|
119
|
+
self.messages = messages
|
|
120
|
+
if not messages and not message:
|
|
121
|
+
self.message = f"Validation Error for {self.identifier}"
|
|
119
122
|
super().__init__(self.message)
|
|
120
123
|
|
|
124
|
+
def __str__(self) -> str:
|
|
125
|
+
if self.messages:
|
|
126
|
+
return f"{self.identifier}: {', '.join(self.messages)}"
|
|
127
|
+
return f"{self.identifier}: {self.message}"
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class ObjectValidationError(Error):
|
|
131
|
+
def __init__(self, position: list[int | str], message: str):
|
|
132
|
+
self.position = position
|
|
133
|
+
self.message = message
|
|
134
|
+
super().__init__(self.message)
|
|
135
|
+
|
|
136
|
+
def __str__(self) -> str:
|
|
137
|
+
return f"{'.'.join(map(str, self.position))}: {self.message}"
|
|
138
|
+
|
|
121
139
|
|
|
122
140
|
class AuthenticationError(Error):
|
|
123
141
|
def __init__(self, message: str | None = None):
|