infrahub-server 1.7.0rc0__py3-none-any.whl → 1.7.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- infrahub/api/schema.py +5 -0
- infrahub/cli/db.py +6 -2
- infrahub/core/branch/models.py +11 -117
- infrahub/core/branch/tasks.py +7 -3
- infrahub/core/diff/merger/merger.py +5 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +2 -1
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +3 -2
- infrahub/core/migrations/graph/m015_diff_format_update.py +3 -2
- infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +3 -2
- infrahub/core/migrations/graph/m017_add_core_profile.py +6 -4
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +3 -4
- infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -3
- infrahub/core/migrations/graph/m025_uniqueness_nulls.py +3 -4
- infrahub/core/migrations/graph/m026_0000_prefix_fix.py +4 -5
- infrahub/core/migrations/graph/m028_delete_diffs.py +3 -2
- infrahub/core/migrations/graph/m029_duplicates_cleanup.py +3 -2
- infrahub/core/migrations/graph/m031_check_number_attributes.py +4 -3
- infrahub/core/migrations/graph/m032_cleanup_orphaned_branch_relationships.py +3 -2
- infrahub/core/migrations/graph/m034_find_orphaned_schema_fields.py +3 -2
- infrahub/core/migrations/graph/m035_orphan_relationships.py +3 -3
- infrahub/core/migrations/graph/m036_drop_attr_value_index.py +3 -2
- infrahub/core/migrations/graph/m037_index_attr_vals.py +3 -2
- infrahub/core/migrations/graph/m038_redo_0000_prefix_fix.py +4 -5
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +3 -2
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +3 -2
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +5 -4
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +12 -5
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +15 -4
- infrahub/core/migrations/graph/m045_backfill_hfid_display_label_in_db_profile_template.py +10 -4
- infrahub/core/migrations/graph/m046_fill_agnostic_hfid_display_labels.py +6 -5
- infrahub/core/migrations/graph/m047_backfill_or_null_display_label.py +19 -5
- infrahub/core/migrations/graph/m048_undelete_rel_props.py +6 -4
- infrahub/core/migrations/graph/m049_remove_is_visible_relationship.py +3 -3
- infrahub/core/migrations/graph/m050_backfill_vertex_metadata.py +3 -3
- infrahub/core/migrations/graph/m051_subtract_branched_from_microsecond.py +39 -0
- infrahub/core/migrations/runner.py +6 -3
- infrahub/core/migrations/schema/attribute_kind_update.py +8 -11
- infrahub/core/migrations/schema/attribute_supports_profile.py +3 -8
- infrahub/core/migrations/schema/models.py +8 -0
- infrahub/core/migrations/schema/node_attribute_add.py +10 -13
- infrahub/core/migrations/schema/tasks.py +7 -1
- infrahub/core/migrations/shared.py +37 -30
- infrahub/core/node/__init__.py +2 -1
- infrahub/core/relationship/model.py +8 -2
- infrahub/core/schema/attribute_parameters.py +28 -1
- infrahub/core/schema/attribute_schema.py +9 -2
- infrahub/core/schema/manager.py +50 -38
- infrahub/core/validators/attribute/kind.py +5 -2
- infrahub/graphql/manager.py +8 -2
- infrahub/lock.py +7 -0
- infrahub/services/adapters/cache/redis.py +7 -0
- infrahub_sdk/analyzer.py +2 -2
- infrahub_sdk/branch.py +12 -39
- infrahub_sdk/checks.py +4 -4
- infrahub_sdk/client.py +36 -0
- infrahub_sdk/ctl/cli_commands.py +2 -1
- infrahub_sdk/ctl/graphql.py +15 -4
- infrahub_sdk/ctl/utils.py +2 -2
- infrahub_sdk/enums.py +6 -0
- infrahub_sdk/graphql/renderers.py +21 -0
- infrahub_sdk/graphql/utils.py +85 -0
- infrahub_sdk/node/attribute.py +12 -2
- infrahub_sdk/node/constants.py +11 -0
- infrahub_sdk/node/metadata.py +69 -0
- infrahub_sdk/node/node.py +65 -14
- infrahub_sdk/node/property.py +3 -0
- infrahub_sdk/node/related_node.py +24 -1
- infrahub_sdk/node/relationship.py +10 -1
- infrahub_sdk/operation.py +2 -2
- infrahub_sdk/schema/repository.py +1 -2
- infrahub_sdk/transforms.py +2 -2
- infrahub_sdk/types.py +18 -2
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.1.dist-info}/METADATA +6 -6
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.1.dist-info}/RECORD +80 -77
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.1.dist-info}/entry_points.txt +0 -1
- infrahub_testcontainers/performance_test.py +1 -1
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.1.dist-info}/licenses/LICENSE.txt +0 -0
infrahub/core/schema/manager.py
CHANGED
```diff
@@ -25,6 +25,7 @@ from infrahub.core.schema import (
     SchemaRoot,
     TemplateSchema,
 )
+from infrahub.core.timestamp import Timestamp
 from infrahub.core.utils import parse_node_kind
 from infrahub.exceptions import SchemaNotFoundError
 from infrahub.log import get_logger
@@ -36,7 +37,6 @@ log = get_logger()
 
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
-    from infrahub.core.timestamp import Timestamp
     from infrahub.database import InfrahubDatabase
 
 
@@ -179,18 +179,20 @@ class SchemaManager(NodeManager):
         diff: SchemaDiff | None = None,
         limit: list[str] | None = None,
         update_db: bool = True,
+        at: Timestamp | None = None,
         user_id: str = SYSTEM_USER_ID,
     ) -> None:
         branch = await registry.get_branch(branch=branch, db=db)
+        at = Timestamp(at)
 
         updated_schema = None
         if update_db:
             if diff:
                 schema_diff = await self.update_schema_to_db(
-                    schema=schema, db=db, branch=branch, diff=diff, user_id=user_id
+                    schema=schema, db=db, branch=branch, diff=diff, at=at, user_id=user_id
                 )
             else:
-                await self.load_schema_to_db(schema=schema, db=db, branch=branch, limit=limit, user_id=user_id)
+                await self.load_schema_to_db(schema=schema, db=db, branch=branch, limit=limit, at=at, user_id=user_id)
             # After updating the schema into the db
             # we need to pull a fresh version because some default value are managed/generated within the node object
             schema_diff = None
@@ -201,7 +203,7 @@
             )
 
             updated_schema = await self.load_schema_from_db(
-                db=db, branch=branch, schema=schema, schema_diff=schema_diff
+                db=db, branch=branch, schema=schema, schema_diff=schema_diff, at=at
             )
 
         self.set_schema_branch(name=branch.name, schema=updated_schema or schema)
@@ -221,6 +223,7 @@
         db: InfrahubDatabase,
         diff: SchemaDiff,
         user_id: str,
+        at: Timestamp,
         branch: Branch | str | None = None,
     ) -> SchemaBranchDiff:
         """Load all nodes, generics and groups from a SchemaRoot object into the database."""
@@ -231,7 +234,7 @@
         added_generics = []
         for item_kind in diff.added.keys():
             item = schema.get(name=item_kind, duplicate=False)
-            node = await self.load_node_to_db(node=item, branch=branch, db=db, user_id=user_id)
+            node = await self.load_node_to_db(node=item, branch=branch, db=db, at=at, user_id=user_id)
             schema.set(name=item_kind, schema=node)
             if item.is_node_schema:
                 added_nodes.append(item_kind)
@@ -244,10 +247,10 @@
             item = schema.get(name=item_kind, duplicate=False)
             if item_diff:
                 node = await self.update_node_in_db_based_on_diff(
-                    node=item, branch=branch, db=db, diff=item_diff, user_id=user_id
+                    node=item, branch=branch, db=db, diff=item_diff, at=at, user_id=user_id
                 )
             else:
-                node = await self.update_node_in_db(node=item, branch=branch, db=db, user_id=user_id)
+                node = await self.update_node_in_db(node=item, branch=branch, db=db, at=at, user_id=user_id)
             schema.set(name=item_kind, schema=node)
             if item.is_node_schema:
                 changed_nodes.append(item_kind)
@@ -258,7 +261,7 @@
         removed_generics = []
         for item_kind in diff.removed.keys():
             item = schema.get(name=item_kind, duplicate=False)
-            node = await self.delete_node_in_db(node=item, branch=branch, db=db, user_id=user_id)
+            node = await self.delete_node_in_db(node=item, branch=branch, db=db, at=at, user_id=user_id)
             schema.delete(name=item_kind)
             if item.is_node_schema:
                 removed_nodes.append(item_kind)
@@ -281,9 +284,10 @@
         branch: Branch | str | None = None,
         limit: list[str] | None = None,
        user_id: str = SYSTEM_USER_ID,
+        at: Timestamp | None = None,
     ) -> None:
         """Load all nodes, generics and groups from a SchemaRoot object into the database."""
-
+        at = Timestamp(at)
         branch = await registry.get_branch(branch=branch, db=db)
 
         for item_kind in schema.node_names + schema.generic_names_without_templates:
@@ -291,10 +295,10 @@
                 continue
             item = schema.get(name=item_kind, duplicate=False)
             if not item.id:
-                node = await self.load_node_to_db(node=item, branch=branch, db=db, user_id=user_id)
+                node = await self.load_node_to_db(node=item, branch=branch, db=db, at=at, user_id=user_id)
                 schema.set(name=item_kind, schema=node)
             else:
-                node = await self.update_node_in_db(node=item, branch=branch, db=db, user_id=user_id)
+                node = await self.update_node_in_db(node=item, branch=branch, db=db, at=at, user_id=user_id)
                 schema.set(name=item_kind, schema=node)
 
     async def load_node_to_db(
@@ -302,6 +306,7 @@
         node: NodeSchema | GenericSchema,
         db: InfrahubDatabase,
         user_id: str,
+        at: Timestamp,
         branch: Branch | str | None = None,
     ) -> NodeSchema | GenericSchema:
         """Load a Node with its attributes and its relationships to the database."""
@@ -322,7 +327,7 @@
         schema_dict = node.model_dump(exclude={"id", "state", "filters", "relationships", "attributes"})
         obj = await Node.init(schema=node_schema, branch=branch, db=db)
         await obj.new(**schema_dict, db=db)
-        await obj.save(db=db, user_id=user_id)
+        await obj.save(db=db, at=at, user_id=user_id)
         new_node.id = obj.id
 
         # Then create the Attributes and the relationships
@@ -333,7 +338,7 @@
         for item in node.attributes:
             if item.inherited is False:
                 new_attr = await self.create_attribute_in_db(
-                    schema=attribute_schema, item=item, parent=obj, branch=branch, db=db, user_id=user_id
+                    schema=attribute_schema, item=item, parent=obj, branch=branch, db=db, at=at, user_id=user_id
                 )
             else:
                 new_attr = item.duplicate()
@@ -342,7 +347,7 @@
         for item in node.relationships:
             if item.inherited is False:
                 new_rel = await self.create_relationship_in_db(
-                    schema=relationship_schema, item=item, parent=obj, branch=branch, db=db, user_id=user_id
+                    schema=relationship_schema, item=item, parent=obj, branch=branch, db=db, at=at, user_id=user_id
                 )
             else:
                 new_rel = item.duplicate()
@@ -357,6 +362,7 @@
         db: InfrahubDatabase,
         node: NodeSchema | GenericSchema,
         user_id: str,
+        at: Timestamp,
         branch: Branch | str | None = None,
     ) -> NodeSchema | GenericSchema:
         """Update a Node with its attributes and its relationships in the database."""
@@ -380,11 +386,11 @@
         new_node = node.duplicate()
 
         # Update the attributes and the relationships nodes as well
-        await obj.attributes.update(db=db, data=[item.id for item in node.local_attributes if item.id])
+        await obj.attributes.update(db=db, data=[item.id for item in node.local_attributes if item.id], at=at)
         await obj.relationships.update(
-            db=db, data=[item.id for item in node.local_relationships if item.id and item.name != "profiles"]
+            db=db, data=[item.id for item in node.local_relationships if item.id and item.name != "profiles"], at=at
         )
-        await obj.save(db=db, user_id=user_id)
+        await obj.save(db=db, at=at, user_id=user_id)
 
         # Then Update the Attributes and the relationships
 
@@ -397,19 +403,19 @@
 
         for item in node.local_attributes:
             if item.id and item.id in items:
-                await self.update_attribute_in_db(item=item, attr=items[item.id], db=db, user_id=user_id)
+                await self.update_attribute_in_db(item=item, attr=items[item.id], db=db, at=at, user_id=user_id)
             elif not item.id:
                 new_attr = await self.create_attribute_in_db(
-                    schema=attribute_schema, item=item, branch=branch, db=db, parent=obj, user_id=user_id
+                    schema=attribute_schema, item=item, branch=branch, db=db, parent=obj, at=at, user_id=user_id
                 )
                 new_node.attributes.append(new_attr)
 
         for item in node.local_relationships:
             if item.id and item.id in items:
-                await self.update_relationship_in_db(item=item, rel=items[item.id], db=db, user_id=user_id)
+                await self.update_relationship_in_db(item=item, rel=items[item.id], db=db, at=at, user_id=user_id)
             elif not item.id:
                 new_rel = await self.create_relationship_in_db(
-                    schema=relationship_schema, item=item, branch=branch, db=db, parent=obj, user_id=user_id
+                    schema=relationship_schema, item=item, branch=branch, db=db, parent=obj, at=at, user_id=user_id
                 )
                 new_node.relationships.append(new_rel)
 
@@ -423,6 +429,7 @@
         diff: HashableModelDiff,
         node: NodeSchema | GenericSchema,
         user_id: str,
+        at: Timestamp,
         branch: Branch | str | None = None,
     ) -> NodeSchema | GenericSchema:
         """Update a Node with its attributes and its relationships in the database based on a HashableModelDiff."""
@@ -496,24 +503,24 @@
         items.update({field.id: field for field in missing_attrs + missing_rels})
 
         if diff_attributes:
-            await obj.attributes.update(db=db, data=[item.id for item in node.local_attributes if item.id])
+            await obj.attributes.update(db=db, data=[item.id for item in node.local_attributes if item.id], at=at)
 
         if diff_relationships:
-            await obj.relationships.update(db=db, data=[item.id for item in node.local_relationships if item.id])
+            await obj.relationships.update(db=db, data=[item.id for item in node.local_relationships if item.id], at=at)
 
-        await obj.save(db=db, user_id=user_id)
+        await obj.save(db=db, at=at, user_id=user_id)
 
         if diff_attributes:
             for item in node.local_attributes:
                 # if item is in changed and has no ID, then it is being overridden from a generic and must be added
                 if item.name in diff_attributes.added or (item.name in diff_attributes.changed and item.id is None):
                     created_item = await self.create_attribute_in_db(
-                        schema=attribute_schema, item=item, branch=branch, db=db, parent=obj, user_id=user_id
+                        schema=attribute_schema, item=item, branch=branch, db=db, parent=obj, at=at, user_id=user_id
                     )
                     new_attr = new_node.get_attribute(name=item.name)
                     new_attr.id = created_item.id
                 elif item.name in diff_attributes.changed and item.id and item.id in items:
-                    await self.update_attribute_in_db(item=item, attr=items[item.id], db=db, user_id=user_id)
+                    await self.update_attribute_in_db(item=item, attr=items[item.id], db=db, at=at, user_id=user_id)
                 elif item.name in diff_attributes.removed and item.id and item.id in items:
                     await items[item.id].delete(db=db, user_id=user_id)
                 elif (
@@ -530,12 +537,12 @@
                     item.name in diff_relationships.changed and item.id is None
                 ):
                     created_rel = await self.create_relationship_in_db(
-                        schema=relationship_schema, item=item, branch=branch, db=db, parent=obj, user_id=user_id
+                        schema=relationship_schema, item=item, branch=branch, db=db, parent=obj, at=at, user_id=user_id
                     )
                     new_rel = new_node.get_relationship(name=item.name)
                     new_rel.id = created_rel.id
                 elif item.name in diff_relationships.changed and item.id and item.id in items:
-                    await self.update_relationship_in_db(item=item, rel=items[item.id], db=db, user_id=user_id)
+                    await self.update_relationship_in_db(item=item, rel=items[item.id], db=db, at=at, user_id=user_id)
                 elif item.name in diff_relationships.removed and item.id and item.id in items:
                     await items[item.id].delete(db=db, user_id=user_id)
                 elif (
@@ -555,7 +562,7 @@
         if field_names_to_remove:
             for field_schema in items.values():
                 if field_schema.name.value in field_names_to_remove:
-                    await field_schema.delete(db=db, user_id=user_id)
+                    await field_schema.delete(db=db, at=at, user_id=user_id)
 
         # Save back the node with the (potentially) newly created IDs in the SchemaManager
         self.set(name=new_node.kind, schema=new_node, branch=branch.name)
@@ -566,6 +573,7 @@
         db: InfrahubDatabase,
         node: NodeSchema | GenericSchema,
         user_id: str,
+        at: Timestamp,
         branch: Branch | str | None = None,
     ) -> None:
         """Delete the node with its attributes and relationships."""
@@ -581,11 +589,11 @@
 
         # First delete the attributes and the relationships
         for attr_schema_node in (await obj.attributes.get_peers(db=db)).values():
-            await attr_schema_node.delete(db=db, user_id=user_id)
+            await attr_schema_node.delete(db=db, at=at, user_id=user_id)
         for rel_schema_node in (await obj.relationships.get_peers(db=db)).values():
-            await rel_schema_node.delete(db=db, user_id=user_id)
+            await rel_schema_node.delete(db=db, at=at, user_id=user_id)
 
-        await obj.delete(db=db, user_id=user_id)
+        await obj.delete(db=db, at=at, user_id=user_id)
 
     @staticmethod
     async def create_attribute_in_db(
@@ -595,20 +603,23 @@
         parent: Node,
         db: InfrahubDatabase,
         user_id: str,
+        at: Timestamp,
     ) -> AttributeSchema:
         obj = await Node.init(schema=schema, branch=branch, db=db)
         await obj.new(**item.to_node(), node=parent, db=db)
-        await obj.save(db=db, user_id=user_id)
+        await obj.save(db=db, at=at, user_id=user_id)
         new_item = item.duplicate()
         new_item.id = obj.id
         return new_item
 
     @staticmethod
-    async def update_attribute_in_db(
+    async def update_attribute_in_db(
+        item: AttributeSchema, attr: Node, db: InfrahubDatabase, at: Timestamp, user_id: str
+    ) -> None:
         item_dict = item.model_dump(exclude={"id", "state", "filters"})
         for key, value in item_dict.items():
             getattr(attr, key).value = value
-        await attr.save(db=db, user_id=user_id)
+        await attr.save(db=db, at=at, user_id=user_id)
 
     @staticmethod
     async def create_relationship_in_db(
@@ -618,22 +629,23 @@
         parent: Node,
         db: InfrahubDatabase,
         user_id: str,
+        at: Timestamp,
     ) -> RelationshipSchema:
         obj = await Node.init(schema=schema, branch=branch, db=db)
         await obj.new(**item.model_dump(exclude={"id", "state", "filters"}), node=parent, db=db)
-        await obj.save(db=db, user_id=user_id)
+        await obj.save(db=db, at=at, user_id=user_id)
         new_item = item.duplicate()
         new_item.id = obj.id
         return new_item
 
     @staticmethod
     async def update_relationship_in_db(
-        item: RelationshipSchema, rel: Node, db: InfrahubDatabase, user_id: str
+        item: RelationshipSchema, rel: Node, db: InfrahubDatabase, at: Timestamp, user_id: str
     ) -> None:
         item_dict = item.model_dump(exclude={"id", "state", "filters"})
         for key, value in item_dict.items():
             getattr(rel, key).value = value
-        await rel.save(db=db, user_id=user_id)
+        await rel.save(db=db, at=at, user_id=user_id)
 
     async def load_schema(
         self,
```
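The common thread in these hunks is that SchemaManager now resolves a single `Timestamp` up front and passes the same `at` value to every save, update, and delete it performs, so all objects written during one schema load share one point in time. A minimal sketch of that pattern under stated assumptions; `Timestamp` here is a stand-in and `FakeNode`/`save_all` are hypothetical names, not Infrahub APIs:

```python
# Illustrative sketch only; FakeNode and save_all are hypothetical, not Infrahub internals.
from __future__ import annotations

from datetime import datetime, timezone


class Timestamp:
    """Stand-in for a timestamp wrapper: Timestamp(None) freezes 'now' once."""

    def __init__(self, value: Timestamp | datetime | None = None) -> None:
        if isinstance(value, Timestamp):
            self.value = value.value
        elif isinstance(value, datetime):
            self.value = value
        else:
            self.value = datetime.now(timezone.utc)


class FakeNode:
    def __init__(self, name: str) -> None:
        self.name = name
        self.saved_at: datetime | None = None

    async def save(self, at: Timestamp) -> None:
        self.saved_at = at.value


async def save_all(nodes: list[FakeNode], at: Timestamp | None = None) -> None:
    # Resolve the timestamp once so every node is written with the same `at`,
    # instead of each save() call generating its own "now".
    at = Timestamp(at)
    for node in nodes:
        await node.save(at=at)
```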
infrahub/core/validators/attribute/kind.py
CHANGED

```diff
@@ -36,7 +36,7 @@ class AttributeKindUpdateValidatorQuery(AttributeSchemaValidatorQuery):
         self.params["null_value"] = NULL_VALUE
 
         query = """
-        MATCH
+        MATCH (n:%(node_kinds)s)
         CALL (n) {
             MATCH path = (root:Root)<-[rr:IS_PART_OF]-(n)-[ra:HAS_ATTRIBUTE]-(:Attribute { name: $attr_name } )-[rv:HAS_VALUE]-(av:AttributeValue)
             WHERE all(
@@ -51,7 +51,10 @@
         WHERE all(r in relationships(full_path) WHERE r.status = "active")
             AND attribute_value IS NOT NULL
             AND attribute_value <> $null_value
-        """ % {
+        """ % {
+            "branch_filter": branch_filter,
+            "node_kinds": f"{self.node_schema.kind}|Profile{self.node_schema.kind}|Template{self.node_schema.kind}",
+        }
 
         self.add_to_query(query)
         self.return_labels = ["node.uuid", "attribute_value", "value_relationship.branch as value_branch"]
```
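The validator query now matches the profile and template variants of the node kind as well. A small illustration of how the `%(node_kinds)s` placeholder expands with Python's `%` formatting; the kind name below is only an example:

```python
# Example of the label expansion used above; "InfraDevice" is an arbitrary example kind.
def build_match_clause(kind: str) -> str:
    return "MATCH (n:%(node_kinds)s)" % {
        "node_kinds": f"{kind}|Profile{kind}|Template{kind}",
    }


print(build_match_clause("InfraDevice"))
# -> MATCH (n:InfraDevice|ProfileInfraDevice|TemplateInfraDevice)
```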
infrahub/graphql/manager.py
CHANGED
```diff
@@ -598,7 +598,10 @@ class GraphQLSchemaManager:
                 required=False,
                 description="Human friendly identifier",
             ),
-            "_updated_at": graphene.DateTime(
+            "_updated_at": graphene.DateTime(
+                required=False,
+                deprecation_reason="Query the node_metadata field instead. Will be removed in Infrahub 1.9",
+            ),
             "display_label": graphene.String(required=False),
             "Meta": type("Meta", (object,), meta_attrs),
         }
@@ -1209,7 +1212,10 @@
 
         main_attrs: dict[str, Any] = {
             "node": graphene.Field(base_interface, required=False),
-            "_updated_at": graphene.DateTime(
+            "_updated_at": graphene.DateTime(
+                required=False,
+                deprecation_reason="Query the node_metadata field instead. Will be removed in Infrahub 1.9",
+            ),
             "node_metadata": graphene.Field(node_metadata, required=True),
             "Meta": type("Meta", (object,), meta_attrs),
         }
```
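Both hunks keep `_updated_at` queryable but mark it deprecated in favour of `node_metadata`. A minimal graphene sketch of the same pattern, mirroring the dict-plus-`type()` construction used above; the type names are illustrative, not the generated Infrahub types:

```python
# Illustrative only: shows deprecation_reason on a field built the same way as above.
import graphene

NodeMetadata = type(
    "NodeMetadata",
    (graphene.ObjectType,),
    {"updated_at": graphene.DateTime(required=False)},
)

ExampleNode = type(
    "ExampleNode",
    (graphene.ObjectType,),
    {
        "_updated_at": graphene.DateTime(
            required=False,
            deprecation_reason="Query the node_metadata field instead.",
        ),
        "node_metadata": graphene.Field(NodeMetadata, required=True),
    },
)
```

With `deprecation_reason` set, GraphQL introspection flags the field as deprecated while existing queries keep working.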
infrahub/lock.py
CHANGED
```diff
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING
 
 import redis.asyncio as redis
 from prometheus_client import Histogram
+from redis import UsernamePasswordCredentialProvider
 from redis.asyncio.lock import Lock as GlobalLock
 
 from infrahub import config
@@ -275,10 +276,16 @@ class InfrahubLockRegistry:
     ) -> None:
         if config.SETTINGS.cache.enable and not local_only:
             if config.SETTINGS.cache.driver == config.CacheDriver.Redis:
+                credential_provider: UsernamePasswordCredentialProvider | None = None
+                if config.SETTINGS.cache.username and config.SETTINGS.cache.password:
+                    credential_provider = UsernamePasswordCredentialProvider(
+                        username=config.SETTINGS.cache.username, password=config.SETTINGS.cache.password
+                    )
                 self.connection = redis.Redis(
                     host=config.SETTINGS.cache.address,
                     port=config.SETTINGS.cache.service_port,
                     db=config.SETTINGS.cache.database,
+                    credential_provider=credential_provider,
                     ssl=config.SETTINGS.cache.tls_enabled,
                     ssl_cert_reqs="optional" if not config.SETTINGS.cache.tls_insecure else "none",
                     ssl_check_hostname=not config.SETTINGS.cache.tls_insecure,
```
infrahub/services/adapters/cache/redis.py
CHANGED

```diff
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 
 import redis.asyncio as redis
+from redis import UsernamePasswordCredentialProvider
 
 from infrahub import config
 from infrahub.services.adapters.cache import InfrahubCache
@@ -13,10 +14,16 @@ if TYPE_CHECKING:
 
 class RedisCache(InfrahubCache):
     def __init__(self) -> None:
+        credential_provider: UsernamePasswordCredentialProvider | None = None
+        if config.SETTINGS.cache.username and config.SETTINGS.cache.password:
+            credential_provider = UsernamePasswordCredentialProvider(
+                username=config.SETTINGS.cache.username, password=config.SETTINGS.cache.password
+            )
         self.connection = redis.Redis(
             host=config.SETTINGS.cache.address,
             port=config.SETTINGS.cache.service_port,
             db=config.SETTINGS.cache.database,
+            credential_provider=credential_provider,
             ssl=config.SETTINGS.cache.tls_enabled,
             ssl_cert_reqs="optional" if not config.SETTINGS.cache.tls_insecure else "none",
             ssl_check_hostname=not config.SETTINGS.cache.tls_insecure,
```
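The lock registry and the cache adapter gain the same optional Redis authentication path. A hedged sketch of that pattern using redis-py's `UsernamePasswordCredentialProvider`; the host, port, and credential values below are placeholders, not Infrahub defaults:

```python
# Sketch only: connection details and credentials are placeholders.
import redis.asyncio as redis
from redis import UsernamePasswordCredentialProvider

username = "infrahub-cache"  # typically read from configuration; may be unset
password = "change-me"       # typically read from configuration; may be unset

credential_provider: UsernamePasswordCredentialProvider | None = None
if username and password:
    # Only authenticate when both values are configured; otherwise connect without credentials.
    credential_provider = UsernamePasswordCredentialProvider(username=username, password=password)

connection = redis.Redis(
    host="localhost",
    port=6379,
    db=0,
    credential_provider=credential_provider,
)
```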
infrahub_sdk/analyzer.py
CHANGED
```diff
@@ -30,10 +30,10 @@ class GraphQLOperation(BaseModel):
 
 
 class GraphQLQueryAnalyzer:
-    def __init__(self, query: str, schema: GraphQLSchema | None = None) -> None:
+    def __init__(self, query: str, schema: GraphQLSchema | None = None, document: DocumentNode | None = None) -> None:
         self.query: str = query
         self.schema: GraphQLSchema | None = schema
-        self.document: DocumentNode = parse(self.query)
+        self.document: DocumentNode = document or parse(self.query)
         self._fields: dict | None = None
 
     @property
```
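A short usage sketch for the new optional `document` argument: a caller that has already parsed the query can hand the `DocumentNode` to the analyzer and avoid parsing the same string twice. The query text below is only an example:

```python
# Usage sketch; the query shape is an arbitrary example.
from graphql import parse

from infrahub_sdk.analyzer import GraphQLQueryAnalyzer

query = "query { BuiltinTag { edges { node { id } } } }"
document = parse(query)  # parsed once, e.g. during an earlier validation step

analyzer = GraphQLQueryAnalyzer(query=query, document=document)
```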
infrahub_sdk/branch.py
CHANGED
```diff
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import warnings
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Literal, overload
 from urllib.parse import urlencode
@@ -93,7 +92,6 @@ class InfrahubBranchManager(InfraHubBranchManagerBase):
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: Literal[True] = True,
-        background_execution: bool | None = False,
     ) -> BranchData: ...
 
     @overload
@@ -103,7 +101,6 @@
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: Literal[False] = False,
-        background_execution: bool | None = False,
     ) -> str: ...
 
     async def create(
@@ -112,19 +109,9 @@
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: bool = True,
-        background_execution: bool | None = False,
     ) -> BranchData | str:
-        if background_execution is not None:
-            warnings.warn(
-                "`background_execution` is deprecated, please use `wait_until_completion` instead.",
-                DeprecationWarning,
-                stacklevel=1,
-            )
-
-        background_execution = background_execution or not wait_until_completion
         input_data = {
-
-            "background_execution": background_execution,
+            "wait_until_completion": wait_until_completion,
             "data": {
                 "name": branch_name,
                 "description": description,
@@ -132,15 +119,14 @@
             },
         }
 
-        mutation_query =
+        mutation_query = MUTATION_QUERY_DATA if wait_until_completion else MUTATION_QUERY_TASK
         query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query)
         response = await self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create")
 
-
-
-
-
-        return BranchData(**response["BranchCreate"]["object"])
+        if wait_until_completion:
+            return BranchData(**response["BranchCreate"]["object"])
+
+        return response["BranchCreate"]["task"]["id"]
 
     async def delete(self, branch_name: str) -> bool:
         input_data = {
@@ -261,7 +247,6 @@ class InfrahubBranchManagerSync(InfraHubBranchManagerBase):
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: Literal[True] = True,
-        background_execution: bool | None = False,
     ) -> BranchData: ...
 
     @overload
@@ -271,7 +256,6 @@
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: Literal[False] = False,
-        background_execution: bool | None = False,
     ) -> str: ...
 
     def create(
@@ -280,19 +264,9 @@
         sync_with_git: bool = True,
         description: str = "",
         wait_until_completion: bool = True,
-        background_execution: bool | None = False,
     ) -> BranchData | str:
-        if background_execution is not None:
-            warnings.warn(
-                "`background_execution` is deprecated, please use `wait_until_completion` instead.",
-                DeprecationWarning,
-                stacklevel=1,
-            )
-
-        background_execution = background_execution or not wait_until_completion
         input_data = {
-
-            "background_execution": background_execution,
+            "wait_until_completion": wait_until_completion,
             "data": {
                 "name": branch_name,
                 "description": description,
@@ -300,15 +274,14 @@
             },
         }
 
-        mutation_query =
+        mutation_query = MUTATION_QUERY_DATA if wait_until_completion else MUTATION_QUERY_TASK
         query = Mutation(mutation="BranchCreate", input_data=input_data, query=mutation_query)
         response = self.client.execute_graphql(query=query.render(), tracker="mutation-branch-create")
 
-
-
-
-
-        return BranchData(**response["BranchCreate"]["object"])
+        if wait_until_completion:
+            return BranchData(**response["BranchCreate"]["object"])
+
+        return response["BranchCreate"]["task"]["id"]
 
     def delete(self, branch_name: str) -> bool:
         input_data = {
```
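A usage sketch of the reworked `create()` behaviour, assuming the usual SDK entry point: with `wait_until_completion=True` it returns the created branch, otherwise it returns the id of the server-side task. The address below is a placeholder:

```python
# Usage sketch; the server address is a placeholder.
from infrahub_sdk import InfrahubClient


async def create_branches() -> None:
    client = InfrahubClient(address="http://localhost:8000")

    # Blocks until the branch exists and returns BranchData.
    branch = await client.branch.create(branch_name="update-firewall-rules")
    print(branch.name)

    # Returns immediately with the id of the background task handling the creation.
    task_id = await client.branch.create(branch_name="bulk-import", wait_until_completion=False)
    print(f"branch creation scheduled as task {task_id}")
```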
infrahub_sdk/checks.py
CHANGED
```diff
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
-import asyncio
 import importlib
-import
+import inspect
+import pathlib
 import warnings
 from abc import abstractmethod
 from typing import TYPE_CHECKING, Any
@@ -55,7 +55,7 @@ class InfrahubCheck:
         self.branch = branch
         self.params = params or {}
 
-        self.root_directory = root_directory or
+        self.root_directory = root_directory or str(pathlib.Path.cwd())
 
         self._client = client
 
@@ -160,7 +160,7 @@
         data = await self.collect_data()
         unpacked = data.get("data") or data
 
-        if
+        if inspect.iscoroutinefunction(self.validate):
             await self.validate(data=unpacked)
         else:
             self.validate(data=unpacked)
```