infrahub-server 1.2.6__py3-none-any.whl → 1.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/transformation.py +1 -0
- infrahub/artifacts/models.py +4 -0
- infrahub/cli/db.py +3 -1
- infrahub/cli/patch.py +153 -0
- infrahub/computed_attribute/models.py +81 -1
- infrahub/computed_attribute/tasks.py +35 -53
- infrahub/config.py +2 -1
- infrahub/constants/__init__.py +0 -0
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/graph/index.py +3 -1
- infrahub/core/manager.py +16 -5
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +7 -8
- infrahub/core/node/__init__.py +4 -1
- infrahub/core/protocols.py +1 -0
- infrahub/core/query/ipam.py +7 -5
- infrahub/core/query/node.py +96 -29
- infrahub/core/schema/definitions/core/builtin.py +2 -4
- infrahub/core/schema/definitions/core/transform.py +1 -0
- infrahub/core/validators/aggregated_checker.py +2 -2
- infrahub/core/validators/uniqueness/query.py +8 -3
- infrahub/database/__init__.py +2 -10
- infrahub/database/index.py +1 -1
- infrahub/database/memgraph.py +2 -1
- infrahub/database/neo4j.py +1 -1
- infrahub/git/integrator.py +27 -3
- infrahub/git/models.py +4 -0
- infrahub/git/tasks.py +3 -0
- infrahub/git_credential/helper.py +2 -2
- infrahub/message_bus/operations/requests/proposed_change.py +6 -0
- infrahub/message_bus/types.py +3 -0
- infrahub/patch/__init__.py +0 -0
- infrahub/patch/constants.py +13 -0
- infrahub/patch/edge_adder.py +64 -0
- infrahub/patch/edge_deleter.py +33 -0
- infrahub/patch/edge_updater.py +28 -0
- infrahub/patch/models.py +98 -0
- infrahub/patch/plan_reader.py +107 -0
- infrahub/patch/plan_writer.py +92 -0
- infrahub/patch/queries/__init__.py +0 -0
- infrahub/patch/queries/base.py +17 -0
- infrahub/patch/queries/consolidate_duplicated_nodes.py +109 -0
- infrahub/patch/queries/delete_duplicated_edges.py +138 -0
- infrahub/patch/runner.py +254 -0
- infrahub/patch/vertex_adder.py +61 -0
- infrahub/patch/vertex_deleter.py +33 -0
- infrahub/patch/vertex_updater.py +28 -0
- infrahub/proposed_change/tasks.py +1 -0
- infrahub/server.py +3 -1
- infrahub/transformations/models.py +3 -0
- infrahub/transformations/tasks.py +1 -0
- infrahub/webhook/models.py +3 -0
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +4 -4
- infrahub_sdk/config.py +17 -0
- infrahub_sdk/ctl/cli_commands.py +9 -3
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/ctl/menu.py +56 -13
- infrahub_sdk/ctl/object.py +55 -5
- infrahub_sdk/ctl/utils.py +22 -1
- infrahub_sdk/exceptions.py +19 -1
- infrahub_sdk/generator.py +12 -66
- infrahub_sdk/node.py +42 -26
- infrahub_sdk/operation.py +80 -0
- infrahub_sdk/protocols.py +12 -0
- infrahub_sdk/protocols_generator/__init__.py +0 -0
- infrahub_sdk/protocols_generator/constants.py +28 -0
- infrahub_sdk/{code_generator.py → protocols_generator/generator.py} +47 -34
- infrahub_sdk/protocols_generator/template.j2 +114 -0
- infrahub_sdk/recorder.py +3 -0
- infrahub_sdk/schema/__init__.py +110 -74
- infrahub_sdk/schema/main.py +36 -2
- infrahub_sdk/schema/repository.py +6 -0
- infrahub_sdk/spec/menu.py +3 -3
- infrahub_sdk/spec/object.py +522 -41
- infrahub_sdk/testing/docker.py +4 -5
- infrahub_sdk/testing/schemas/animal.py +7 -0
- infrahub_sdk/transforms.py +15 -27
- infrahub_sdk/yaml.py +63 -7
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/RECORD +85 -64
- infrahub_testcontainers/docker-compose.test.yml +2 -0
- infrahub_sdk/ctl/constants.py +0 -115
- /infrahub/{database/constants.py → constants/database.py} +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.6.dist-info → infrahub_server-1.2.8.dist-info}/entry_points.txt +0 -0
infrahub/patch/runner.py
ADDED
@@ -0,0 +1,254 @@
+from pathlib import Path
+
+from .edge_adder import PatchPlanEdgeAdder
+from .edge_deleter import PatchPlanEdgeDeleter
+from .edge_updater import PatchPlanEdgeUpdater
+from .models import EdgeToAdd, EdgeToDelete, EdgeToUpdate, PatchPlan, VertexToAdd, VertexToDelete, VertexToUpdate
+from .plan_reader import PatchPlanReader
+from .plan_writer import PatchPlanWriter
+from .queries.base import PatchQuery
+from .vertex_adder import PatchPlanVertexAdder
+from .vertex_deleter import PatchPlanVertexDeleter
+from .vertex_updater import PatchPlanVertexUpdater
+
+
+class PatchPlanEdgeDbIdTranslator:
+    def translate_to_db_ids(self, patch_plan: PatchPlan) -> None:
+        for edge_to_add in patch_plan.edges_to_add:
+            translated_from_id = patch_plan.get_database_id_for_added_element(abstract_id=edge_to_add.from_id)
+            edge_to_add.from_id = translated_from_id
+            translated_to_id = patch_plan.get_database_id_for_added_element(abstract_id=edge_to_add.to_id)
+            edge_to_add.to_id = translated_to_id
+
+
+class PatchRunner:
+    def __init__(
+        self,
+        plan_writer: PatchPlanWriter,
+        plan_reader: PatchPlanReader,
+        edge_db_id_translator: PatchPlanEdgeDbIdTranslator,
+        vertex_adder: PatchPlanVertexAdder,
+        vertex_updater: PatchPlanVertexUpdater,
+        vertex_deleter: PatchPlanVertexDeleter,
+        edge_adder: PatchPlanEdgeAdder,
+        edge_updater: PatchPlanEdgeUpdater,
+        edge_deleter: PatchPlanEdgeDeleter,
+    ) -> None:
+        self.plan_writer = plan_writer
+        self.plan_reader = plan_reader
+        self.edge_db_id_translator = edge_db_id_translator
+        self.vertex_adder = vertex_adder
+        self.vertex_updater = vertex_updater
+        self.vertex_deleter = vertex_deleter
+        self.edge_adder = edge_adder
+        self.edge_updater = edge_updater
+        self.edge_deleter = edge_deleter
+
+    async def prepare_plan(self, patch_query: PatchQuery, directory: Path) -> Path:
+        patch_plan = await patch_query.plan()
+        return self.plan_writer.write(patches_directory=directory, patch_plan=patch_plan)
+
+    async def apply(self, patch_plan_directory: Path) -> PatchPlan:
+        patch_plan = self.plan_reader.read(patch_plan_directory)
+        await self._apply_vertices_to_add(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        await self._apply_edges_to_add(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        if patch_plan.vertices_to_update:
+            await self.vertex_updater.execute(vertices_to_update=patch_plan.vertices_to_update)
+        await self._apply_edges_to_delete(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        await self._apply_vertices_to_delete(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        if patch_plan.edges_to_update:
+            await self.edge_updater.execute(edges_to_update=patch_plan.edges_to_update)
+        return patch_plan
+
+    async def _apply_vertices_to_add(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        if not patch_plan.vertices_to_add:
+            return
+        unadded_vertices = [
+            v for v in patch_plan.vertices_to_add if not patch_plan.has_element_been_added(v.identifier)
+        ]
+        try:
+            async for added_element_id_map in self.vertex_adder.execute(vertices_to_add=unadded_vertices):
+                patch_plan.added_element_db_id_map.update(added_element_id_map)
+        finally:
+            # record the added elements so that we do not double-add them if the patch is run again
+            self.plan_writer.write_added_db_id_map(
+                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
+            )
+
+    async def _apply_edges_to_add(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        if not patch_plan.edges_to_add:
+            return
+        self.edge_db_id_translator.translate_to_db_ids(patch_plan=patch_plan)
+        unadded_edges = [e for e in patch_plan.edges_to_add if not patch_plan.has_element_been_added(e.identifier)]
+        try:
+            async for added_element_id_map in self.edge_adder.execute(edges_to_add=unadded_edges):
+                patch_plan.added_element_db_id_map.update(added_element_id_map)
+        finally:
+            # record the added elements so that we do not double-add them if the patch is run again
+            self.plan_writer.write_added_db_id_map(
+                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
+            )
+
+    async def _apply_vertices_to_delete(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        if not patch_plan.vertices_to_delete:
+            return
+        try:
+            async for deleted_ids in self.vertex_deleter.execute(vertices_to_delete=patch_plan.vertices_to_delete):
+                patch_plan.deleted_db_ids |= deleted_ids
+        finally:
+            # record the deleted elements so that we know what to add if the patch is reverted
+            self.plan_writer.write_deleted_db_ids(
+                patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
+            )
+
+    async def _apply_edges_to_delete(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        if not patch_plan.edges_to_delete:
+            return
+        try:
+            async for deleted_ids in self.edge_deleter.execute(edges_to_delete=patch_plan.edges_to_delete):
+                patch_plan.deleted_db_ids |= deleted_ids
+        finally:
+            # record the deleted elements so that we know what to add if the patch is reverted
+            self.plan_writer.write_deleted_db_ids(
+                patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
+            )
+
+    async def revert(self, patch_plan_directory: Path) -> PatchPlan:
+        """Invert the PatchPlan to create the complement of every added/updated/deleted element and undo them"""
+        patch_plan = self.plan_reader.read(patch_plan_directory)
+        await self._revert_deleted_vertices(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        await self._revert_deleted_edges(
+            patch_plan=patch_plan,
+            patch_plan_directory=patch_plan_directory,
+        )
+        await self._revert_added_edges(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        await self._revert_added_vertices(patch_plan=patch_plan, patch_plan_directory=patch_plan_directory)
+        vertices_to_update = [
+            VertexToUpdate(
+                db_id=vertex_update_to_revert.db_id,
+                before_props=vertex_update_to_revert.after_props,
+                after_props=vertex_update_to_revert.before_props,
+            )
+            for vertex_update_to_revert in patch_plan.vertices_to_update
+        ]
+        if vertices_to_update:
+            await self.vertex_updater.execute(vertices_to_update=vertices_to_update)
+
+        edges_to_update = [
+            EdgeToUpdate(
+                db_id=edge_update_to_revert.db_id,
+                before_props=edge_update_to_revert.after_props,
+                after_props=edge_update_to_revert.before_props,
+            )
+            for edge_update_to_revert in patch_plan.edges_to_update
+        ]
+        if edges_to_update:
+            await self.edge_updater.execute(edges_to_update=edges_to_update)
+        if patch_plan.reverted_deleted_db_id_map:
+            patch_plan.reverted_deleted_db_id_map = {}
+            self.plan_writer.write_reverted_deleted_db_id_map(
+                patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.reverted_deleted_db_id_map
+            )
+        return patch_plan
+
+    async def _revert_added_vertices(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        vertices_to_delete = [
+            VertexToDelete(
+                db_id=patch_plan.get_database_id_for_added_element(abstract_id=vertex_add_to_revert.identifier),
+                labels=vertex_add_to_revert.labels,
+                before_props=vertex_add_to_revert.after_props,
+            )
+            for vertex_add_to_revert in patch_plan.added_vertices
+        ]
+        if not vertices_to_delete:
+            return
+        all_deleted_ids: set[str] = set()
+        try:
+            async for deleted_ids in self.vertex_deleter.execute(vertices_to_delete=vertices_to_delete):
+                all_deleted_ids |= deleted_ids
+        finally:
+            if all_deleted_ids:
+                patch_plan.drop_added_db_ids(db_ids_to_drop=all_deleted_ids)
+                self.plan_writer.write_added_db_id_map(
+                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
+                )
+
+    async def _revert_deleted_vertices(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        vertices_to_add = [
+            VertexToAdd(
+                labels=vertex_delete_to_revert.labels,
+                after_props=vertex_delete_to_revert.before_props,
+                identifier=vertex_delete_to_revert.db_id,
+            )
+            for vertex_delete_to_revert in patch_plan.deleted_vertices
+        ]
+        if not vertices_to_add:
+            return
+
+        deleted_to_undeleted_db_id_map: dict[str, str] = {}
+        try:
+            async for added_db_id_map in self.vertex_adder.execute(vertices_to_add=vertices_to_add):
+                deleted_to_undeleted_db_id_map.update(added_db_id_map)
+        finally:
+            if deleted_to_undeleted_db_id_map:
+                patch_plan.drop_deleted_db_ids(db_ids_to_drop=set(deleted_to_undeleted_db_id_map.keys()))
+                self.plan_writer.write_deleted_db_ids(
+                    patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
+                )
+                patch_plan.reverted_deleted_db_id_map.update(deleted_to_undeleted_db_id_map)
+                self.plan_writer.write_reverted_deleted_db_id_map(
+                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.reverted_deleted_db_id_map
+                )
+
+    async def _revert_added_edges(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        edges_to_delete = [
+            EdgeToDelete(
+                db_id=patch_plan.get_database_id_for_added_element(abstract_id=edge_add_to_revert.identifier),
+                from_id=edge_add_to_revert.from_id,
+                to_id=edge_add_to_revert.to_id,
+                edge_type=edge_add_to_revert.edge_type,
+                before_props=edge_add_to_revert.after_props,
+            )
+            for edge_add_to_revert in patch_plan.added_edges
+        ]
+        if not edges_to_delete:
+            return
+        all_deleted_ids: set[str] = set()
+        try:
+            async for deleted_ids in self.edge_deleter.execute(edges_to_delete=edges_to_delete):
+                all_deleted_ids |= deleted_ids
+        finally:
+            if all_deleted_ids:
+                patch_plan.drop_added_db_ids(db_ids_to_drop=all_deleted_ids)
+                self.plan_writer.write_added_db_id_map(
+                    patch_plan_directory=patch_plan_directory, db_id_map=patch_plan.added_element_db_id_map
+                )
+
+    async def _revert_deleted_edges(self, patch_plan: PatchPlan, patch_plan_directory: Path) -> None:
+        edges_to_add = [
+            EdgeToAdd(
+                identifier=edge_delete_to_revert.db_id,
+                from_id=patch_plan.reverted_deleted_db_id_map.get(
+                    edge_delete_to_revert.from_id, edge_delete_to_revert.from_id
+                ),
+                to_id=patch_plan.reverted_deleted_db_id_map.get(
+                    edge_delete_to_revert.to_id, edge_delete_to_revert.to_id
+                ),
+                edge_type=edge_delete_to_revert.edge_type,
+                after_props=edge_delete_to_revert.before_props,
+            )
+            for edge_delete_to_revert in patch_plan.deleted_edges
+        ]
+        if not edges_to_add:
+            return
+
+        undeleted_ids: set[str] = set()
+        try:
+            async for added_db_id_map in self.edge_adder.execute(edges_to_add=edges_to_add):
+                undeleted_ids |= set(added_db_id_map.keys())
+        finally:
+            if undeleted_ids:
+                patch_plan.drop_deleted_db_ids(db_ids_to_drop=undeleted_ids)
+                self.plan_writer.write_deleted_db_ids(
+                    patch_plan_directory=patch_plan_directory, deleted_ids=patch_plan.deleted_db_ids
+                )
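The runner composes small per-element helpers and persists progress to disk after every step (note the finally blocks above), so a failed apply or revert can be retried without double-applying elements. A minimal sketch of how the pieces might be wired together; the zero-argument PatchPlanWriter/PatchPlanReader constructors and the db-only constructors for the edge helpers are assumptions based on the imports shown above, not code from this diff:

from pathlib import Path

from infrahub.database import InfrahubDatabase
from infrahub.patch.edge_adder import PatchPlanEdgeAdder
from infrahub.patch.edge_deleter import PatchPlanEdgeDeleter
from infrahub.patch.edge_updater import PatchPlanEdgeUpdater
from infrahub.patch.plan_reader import PatchPlanReader
from infrahub.patch.plan_writer import PatchPlanWriter
from infrahub.patch.runner import PatchPlanEdgeDbIdTranslator, PatchRunner
from infrahub.patch.vertex_adder import PatchPlanVertexAdder
from infrahub.patch.vertex_deleter import PatchPlanVertexDeleter
from infrahub.patch.vertex_updater import PatchPlanVertexUpdater


async def apply_and_revert(db: InfrahubDatabase, plan_dir: Path) -> None:
    # hypothetical wiring; constructor signatures not shown in this diff are assumed
    runner = PatchRunner(
        plan_writer=PatchPlanWriter(),
        plan_reader=PatchPlanReader(),
        edge_db_id_translator=PatchPlanEdgeDbIdTranslator(),
        vertex_adder=PatchPlanVertexAdder(db=db),
        vertex_updater=PatchPlanVertexUpdater(db=db),
        vertex_deleter=PatchPlanVertexDeleter(db=db),
        edge_adder=PatchPlanEdgeAdder(db=db),
        edge_updater=PatchPlanEdgeUpdater(db=db),
        edge_deleter=PatchPlanEdgeDeleter(db=db),
    )
    # apply the plan stored in plan_dir; added/deleted ids are written back
    # to disk as the run progresses, so a retry skips already-applied elements
    await runner.apply(patch_plan_directory=plan_dir)
    # revert() reads the same directory and creates the complement of each change
    await runner.revert(patch_plan_directory=plan_dir)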
infrahub/patch/vertex_adder.py
ADDED
@@ -0,0 +1,61 @@
+from collections import defaultdict
+from dataclasses import asdict
+from typing import AsyncGenerator
+
+from infrahub.core.query import QueryType
+from infrahub.database import InfrahubDatabase
+
+from .models import VertexToAdd
+
+
+class PatchPlanVertexAdder:
+    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
+        self.db = db
+        self.batch_size_limit = batch_size_limit
+
+    async def _run_add_query(self, labels: list[str], vertices_to_add: list[VertexToAdd]) -> dict[str, str]:
+        labels_str = ":".join(labels)
+        serial_vertices_to_add: list[dict[str, str | int | bool]] = [asdict(v) for v in vertices_to_add]
+        query = """
+        UNWIND $vertices_to_add AS vertex_to_add
+        CREATE (v:%(labels)s)
+        SET v = vertex_to_add.after_props
+        RETURN vertex_to_add.identifier AS abstract_id, %(id_func_name)s(v) AS db_id
+        """ % {
+            "labels": labels_str,
+            "id_func_name": self.db.get_id_function_name(),
+        }
+        # use transaction to make sure we record the results before committing them
+        try:
+            txn_db = self.db.start_transaction()
+            async with txn_db as txn:
+                results = await txn.execute_query(
+                    query=query, params={"vertices_to_add": serial_vertices_to_add}, type=QueryType.WRITE
+                )
+                abstract_to_concrete_id_map: dict[str, str] = {}
+                for result in results:
+                    abstract_id = result.get("abstract_id")
+                    concrete_id = result.get("db_id")
+                    abstract_to_concrete_id_map[abstract_id] = concrete_id
+        finally:
+            await txn_db.close()
+        return abstract_to_concrete_id_map
+
+    async def execute(self, vertices_to_add: list[VertexToAdd]) -> AsyncGenerator[dict[str, str], None]:
+        """
+        Create vertices_to_add on the database.
+        Returns a generator that yields dictionaries mapping VertexToAdd.identifier to the database-level ID of the newly created vertex.
+        """
+        vertices_map_queue: dict[frozenset[str], list[VertexToAdd]] = defaultdict(list)
+        for vertex_to_add in vertices_to_add:
+            frozen_labels = frozenset(vertex_to_add.labels)
+            vertices_map_queue[frozen_labels].append(vertex_to_add)
+            if len(vertices_map_queue[frozen_labels]) > self.batch_size_limit:
+                yield await self._run_add_query(
+                    labels=list(frozen_labels),
+                    vertices_to_add=vertices_map_queue[frozen_labels],
+                )
+                vertices_map_queue[frozen_labels] = []
+
+        for frozen_labels, vertices_group in vertices_map_queue.items():
+            yield await self._run_add_query(labels=list(frozen_labels), vertices_to_add=vertices_group)
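Grouping by frozenset(labels) means vertices whose label sets match, regardless of order, share one CREATE query with a single static label string. A standalone illustration of the grouping (not taken from the package):

from collections import defaultdict

vertices = [
    {"identifier": "v1", "labels": ["Node", "Device"]},
    {"identifier": "v2", "labels": ["Device", "Node"]},  # same group as v1
    {"identifier": "v3", "labels": ["Attribute"]},
]
groups: dict[frozenset, list[dict]] = defaultdict(list)
for vertex in vertices:
    groups[frozenset(vertex["labels"])].append(vertex)

assert len(groups) == 2  # label order does not affect grouping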
infrahub/patch/vertex_deleter.py
ADDED
@@ -0,0 +1,33 @@
+from typing import AsyncGenerator
+
+from infrahub.core.query import QueryType
+from infrahub.database import InfrahubDatabase
+
+from .models import VertexToDelete
+
+
+class PatchPlanVertexDeleter:
+    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
+        self.db = db
+        self.batch_size_limit = batch_size_limit
+
+    async def _run_delete_query(self, ids_to_delete: list[str]) -> set[str]:
+        query = """
+        MATCH (n)
+        WHERE %(id_func_name)s(n) IN $ids_to_delete
+        DETACH DELETE n
+        RETURN %(id_func_name)s(n) AS deleted_id
+        """ % {"id_func_name": self.db.get_id_function_name()}
+        results = await self.db.execute_query(
+            query=query, params={"ids_to_delete": ids_to_delete}, type=QueryType.WRITE
+        )
+        deleted_ids: set[str] = set()
+        for result in results:
+            deleted_id = result.get("deleted_id")
+            deleted_ids.add(deleted_id)
+        return deleted_ids
+
+    async def execute(self, vertices_to_delete: list[VertexToDelete]) -> AsyncGenerator[set[str], None]:
+        for i in range(0, len(vertices_to_delete), self.batch_size_limit):
+            ids_to_delete = [v.db_id for v in vertices_to_delete[i : i + self.batch_size_limit]]
+            yield await self._run_delete_query(ids_to_delete=ids_to_delete)
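The deleter (and the updater that follows) batches with plain list slicing instead of label grouping, since its query does not depend on labels. The slicing pattern is equivalent to:

items = list(range(7))
batch_size = 3
batches = [items[i : i + batch_size] for i in range(0, len(items), batch_size)]
assert batches == [[0, 1, 2], [3, 4, 5], [6]]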
infrahub/patch/vertex_updater.py
ADDED
@@ -0,0 +1,28 @@
+from dataclasses import asdict
+
+from infrahub.core.query import QueryType
+from infrahub.database import InfrahubDatabase
+
+from .models import VertexToUpdate
+
+
+class PatchPlanVertexUpdater:
+    def __init__(self, db: InfrahubDatabase, batch_size_limit: int = 1000) -> None:
+        self.db = db
+        self.batch_size_limit = batch_size_limit
+
+    async def _run_update_query(self, vertices_to_update: list[VertexToUpdate]) -> None:
+        query = """
+        UNWIND $vertices_to_update AS vertex_to_update
+        MATCH (n)
+        WHERE %(id_func_name)s(n) = vertex_to_update.db_id
+        SET n = vertex_to_update.after_props
+        """ % {"id_func_name": self.db.get_id_function_name()}
+        await self.db.execute_query(
+            query=query, params={"vertices_to_update": [asdict(v) for v in vertices_to_update]}, type=QueryType.WRITE
+        )
+
+    async def execute(self, vertices_to_update: list[VertexToUpdate]) -> None:
+        for i in range(0, len(vertices_to_update), self.batch_size_limit):
+            vertices_slice = vertices_to_update[i : i + self.batch_size_limit]
+            await self._run_update_query(vertices_to_update=vertices_slice)
infrahub/message_bus/operations/requests/proposed_change.py
CHANGED
@@ -607,6 +607,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, s
             content_type=model.artifact_definition.content_type,
             transform_type=model.artifact_definition.transform_kind,
             transform_location=model.artifact_definition.transform_location,
+            convert_query_response=model.artifact_definition.convert_query_response,
             repository_id=repository.repository_id,
             repository_name=repository.repository_name,
             repository_kind=repository.kind,
infrahub/server.py
CHANGED
@@ -23,7 +23,7 @@ from infrahub import __version__, config
 from infrahub.api import router as api
 from infrahub.api.exception_handlers import generic_api_exception_handler
 from infrahub.components import ComponentType
-from infrahub.core.graph.index import node_indexes, rel_indexes
+from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
 from infrahub.core.initialization import initialization
 from infrahub.database import InfrahubDatabase, InfrahubDatabaseMode, get_db
 from infrahub.dependencies.registry import build_component_registry
@@ -58,6 +58,8 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True

     # Initialize database Driver and load local registry
     database = application.state.db = InfrahubDatabase(mode=InfrahubDatabaseMode.DRIVER, driver=await get_db())
+    if config.SETTINGS.experimental_features.value_db_index:
+        node_indexes.append(attr_value_index)
     database.manager.index.init(nodes=node_indexes, rels=rel_indexes)

     build_component_registry()
infrahub/transformations/models.py
CHANGED
@@ -11,6 +11,9 @@ class TransformPythonData(BaseModel):
     branch: str = Field(..., description="The branch to target")
     transform_location: str = Field(..., description="Location of the transform within the repository")
     commit: str = Field(..., description="The commit id to use when generating the artifact")
+    convert_query_response: bool = Field(
+        ..., description="Define if the GraphQL query response should be converted into InfrahubNode objects"
+    )
     timeout: int = Field(..., description="The timeout value to use when generating the artifact")

infrahub/transformations/tasks.py
CHANGED
@@ -30,6 +30,7 @@ async def transform_python(message: TransformPythonData, service: InfrahubServic
         location=message.transform_location,
         data=message.data,
         client=service.client,
+        convert_query_response=message.convert_query_response,
     )  # type: ignore[misc]

     return transformed_data
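Taken together with the field description above, convert_query_response lets a Python transform receive InfrahubNode objects instead of the raw GraphQL payload, mirroring the option generators already expose. A hedged sketch of what opting in might look like; the class-attribute spelling and the self.nodes accessor are assumptions, not shown in this diff:

from typing import Any

from infrahub_sdk.transforms import InfrahubTransform


class TagsTransform(InfrahubTransform):
    query = "tags_query"  # hypothetical GraphQL query name
    convert_query_response = True  # assumed to be settable per transform

    async def transform(self, data: dict[str, Any]) -> Any:
        # with conversion enabled, query results are assumed to arrive as
        # InfrahubNode objects rather than nested dictionaries
        return {"tags": [node.name.value for node in self.nodes]}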
infrahub/webhook/models.py
CHANGED
@@ -204,6 +204,7 @@ class TransformWebhook(Webhook):
     transform_class: str = Field(...)
     transform_file: str = Field(...)
     transform_timeout: int = Field(...)
+    convert_query_response: bool = Field(...)

     async def _prepare_payload(self, data: dict[str, Any], context: EventContext, service: InfrahubServices) -> None:
         repo: InfrahubReadOnlyRepository | InfrahubRepository
@@ -229,6 +230,7 @@ class TransformWebhook(Webhook):
             branch_name=branch,
             commit=commit,
             location=f"{self.transform_file}::{self.transform_class}",
+            convert_query_response=self.convert_query_response,
             data={"data": data, **context.model_dump()},
             client=service.client,
         )  # type: ignore[misc]
@@ -247,4 +249,5 @@ class TransformWebhook(Webhook):
             transform_class=transform.class_name.value,
             transform_file=transform.file_path.value,
             transform_timeout=transform.timeout.value,
+            convert_query_response=transform.convert_query_response.value or False,
         )
infrahub_sdk/checks.py
CHANGED
@@ -83,7 +83,7 @@ class InfrahubCheck:
     async def init(cls, client: InfrahubClient | None = None, *args: Any, **kwargs: Any) -> InfrahubCheck:
         """Async init method, If an existing InfrahubClient client hasn't been provided, one will be created automatically."""
         warnings.warn(
-            "InfrahubCheck.init has been deprecated and will be removed in
+            "InfrahubCheck.init has been deprecated and will be removed in version 2.0.0 of the Infrahub Python SDK",
             DeprecationWarning,
             stacklevel=1,
         )
infrahub_sdk/client.py
CHANGED
@@ -847,9 +847,9 @@ class InfrahubClient(BaseClient):
                 self.store.set(node=node)
         return nodes

-    def clone(self) -> InfrahubClient:
+    def clone(self, branch: str | None = None) -> InfrahubClient:
         """Return a cloned version of the client using the same configuration"""
-        return InfrahubClient(config=self.config)
+        return InfrahubClient(config=self.config.clone(branch=branch))

     async def execute_graphql(
         self,
@@ -1591,9 +1591,9 @@ class InfrahubClientSync(BaseClient):
         node = InfrahubNodeSync(client=self, schema=schema, branch=branch, data={"id": id})
         node.delete()

-    def clone(self) -> InfrahubClientSync:
+    def clone(self, branch: str | None = None) -> InfrahubClientSync:
         """Return a cloned version of the client using the same configuration"""
-        return InfrahubClientSync(config=self.config)
+        return InfrahubClientSync(config=self.config.clone(branch=branch))

     def execute_graphql(
         self,
infrahub_sdk/config.py
CHANGED
@@ -1,5 +1,6 @@
 from __future__ import annotations

+from copy import deepcopy
 from typing import Any

 from pydantic import Field, field_validator, model_validator
@@ -158,3 +159,19 @@ class Config(ConfigBase):
         elif values.get("recorder") == RecorderType.JSON and "custom_recorder" not in values:
             values["custom_recorder"] = JSONRecorder()
         return values
+
+    def clone(self, branch: str | None = None) -> Config:
+        config: dict[str, Any] = {
+            "default_branch": branch or self.default_branch,
+            "recorder": self.recorder,
+            "custom_recorder": self.custom_recorder,
+            "requester": self.requester,
+            "sync_requester": self.sync_requester,
+            "log": self.log,
+        }
+        covered_keys = list(config.keys())
+        for field in Config.model_fields.keys():
+            if field not in covered_keys:
+                config[field] = deepcopy(getattr(self, field))
+
+        return Config(**config)
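Together with the clone(branch=...) change in infrahub_sdk/client.py above, this makes it cheap to derive a client pinned to another branch while leaving the original untouched. A short usage sketch (the address and branch names are illustrative):

from infrahub_sdk import Config, InfrahubClient

client = InfrahubClient(config=Config(address="http://localhost:8000"))
feature_client = client.clone(branch="my-feature")

assert feature_client.config.default_branch == "my-feature"
assert client.config.default_branch == "main"  # original config is unchanged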
infrahub_sdk/ctl/cli_commands.py
CHANGED
@@ -20,7 +20,6 @@ from rich.table import Table

 from .. import __version__ as sdk_version
 from ..async_typer import AsyncTyper
-from ..code_generator import CodeGenerator
 from ..ctl import config
 from ..ctl.branch import app as branch_app
 from ..ctl.check import run as run_check
@@ -42,6 +41,8 @@ from ..ctl.utils import (
 )
 from ..ctl.validate import app as validate_app
 from ..exceptions import GraphQLError, ModuleImportError
+from ..node import InfrahubNode
+from ..protocols_generator.generator import CodeGenerator
 from ..schema import MainSchemaTypesAll, SchemaRoot
 from ..template import Jinja2Template
 from ..template.exceptions import JinjaTemplateError
@@ -61,7 +62,7 @@ app.add_typer(schema_app, name="schema")
 app.add_typer(validate_app, name="validate")
 app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
-app.add_typer(object_app, name="object"
+app.add_typer(object_app, name="object")

 app.command(name="dump")(dump)
 app.command(name="load")(load)
@@ -330,7 +331,12 @@ def transform(
         console.print(f"[red]{exc.message}")
         raise typer.Exit(1) from exc

-    transform = transform_class(
+    transform = transform_class(
+        client=client,
+        branch=branch,
+        infrahub_node=InfrahubNode,
+        convert_query_response=transform_config.convert_query_response,
+    )
     # Get data
     query_str = repository_config.get_query(name=transform.query).load_query()
     data = asyncio.run(
infrahub_sdk/ctl/generator.py
CHANGED
@@ -62,7 +62,7 @@ async def run(
         generator = generator_class(
             query=generator_config.query,
             client=client,
-            branch=branch,
+            branch=branch or "",
             params=variables_dict,
             convert_query_response=generator_config.convert_query_response,
             infrahub_node=InfrahubNode,
@@ -91,7 +91,7 @@ async def run(
         generator = generator_class(
             query=generator_config.query,
             client=client,
-            branch=branch,
+            branch=branch or "",
             params=params,
             convert_query_response=generator_config.convert_query_response,
             infrahub_node=InfrahubNode,
infrahub_sdk/ctl/menu.py
CHANGED
@@ -7,9 +7,14 @@ from rich.console import Console
 from ..async_typer import AsyncTyper
 from ..ctl.client import initialize_client
 from ..ctl.utils import catch_exception, init_logging
+from ..exceptions import ObjectValidationError, ValidationError
 from ..spec.menu import MenuFile
 from .parameters import CONFIG_PARAM
-from .utils import
+from .utils import (
+    display_object_validate_format_error,
+    display_object_validate_format_success,
+    load_yamlfile_from_disk_and_exit,
+)

 app = AsyncTyper()
 console = Console()
@@ -39,16 +44,54 @@ async def load(
     files = load_yamlfile_from_disk_and_exit(paths=menus, file_type=MenuFile, console=console)
     client = initialize_client()

+    has_errors = False
+
+    for file in files:
+        try:
+            await file.validate_format(client=client, branch=branch)
+        except ValidationError as exc:
+            has_errors = True
+            display_object_validate_format_error(file=file, error=exc, console=console)
+
+    if has_errors:
+        raise typer.Exit(1)
+
     for file in files:
-        … (12 removed lines not rendered in the source diff)
+        try:
+            await file.process(client=client, branch=branch)
+        except ObjectValidationError as exc:
+            has_errors = True
+            console.print(f"[red] {exc!s}")
+
+    if has_errors:
+        raise typer.Exit(1)
+
+
+@app.command()
+@catch_exception(console=console)
+async def validate(
+    paths: list[Path],
+    debug: bool = False,
+    branch: str = typer.Option(None, help="Branch on which to validate the objects."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Validate one or multiple menu files."""
+
+    init_logging(debug=debug)
+
+    logging.getLogger("infrahub_sdk").setLevel(logging.INFO)
+
+    files = load_yamlfile_from_disk_and_exit(paths=paths, file_type=MenuFile, console=console)
+    client = initialize_client()
+
+    has_errors = False
+    for file in files:
+        try:
+            await file.validate_format(client=client, branch=branch)
+            display_object_validate_format_success(file=file, console=console)
+        except ValidationError as exc:
+            has_errors = True
+            display_object_validate_format_error(file=file, error=exc, console=console)
+
+    if has_errors:
+        raise typer.Exit(1)
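The new validate command brings a check-before-load workflow to the menu subcommand, and load now runs the same format validation up front, exiting non-zero before processing anything if any file fails. Assuming the standard infrahubctl entry point, something like `infrahubctl menu validate menus/ --branch main` should check menu files without loading them.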