infrahub-server 1.1.1__py3-none-any.whl → 1.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/__init__.py +13 -5
- infrahub/api/artifact.py +9 -15
- infrahub/api/auth.py +7 -1
- infrahub/api/dependencies.py +15 -2
- infrahub/api/diff/diff.py +13 -7
- infrahub/api/file.py +5 -10
- infrahub/api/internal.py +19 -6
- infrahub/api/menu.py +8 -6
- infrahub/api/oauth2.py +25 -10
- infrahub/api/oidc.py +26 -10
- infrahub/api/query.py +2 -2
- infrahub/api/schema.py +48 -59
- infrahub/api/storage.py +8 -8
- infrahub/api/transformation.py +6 -5
- infrahub/auth.py +1 -26
- infrahub/cli/__init__.py +1 -1
- infrahub/cli/context.py +5 -8
- infrahub/cli/db.py +6 -6
- infrahub/cli/git_agent.py +1 -1
- infrahub/computed_attribute/models.py +1 -1
- infrahub/computed_attribute/tasks.py +1 -1
- infrahub/config.py +5 -5
- infrahub/core/account.py +2 -10
- infrahub/core/attribute.py +22 -0
- infrahub/core/branch/models.py +1 -1
- infrahub/core/branch/tasks.py +4 -3
- infrahub/core/diff/calculator.py +14 -0
- infrahub/core/diff/combiner.py +6 -2
- infrahub/core/diff/conflicts_enricher.py +2 -2
- infrahub/core/diff/coordinator.py +296 -87
- infrahub/core/diff/data_check_synchronizer.py +33 -4
- infrahub/core/diff/enricher/cardinality_one.py +3 -3
- infrahub/core/diff/enricher/hierarchy.py +4 -1
- infrahub/core/diff/merger/merger.py +11 -1
- infrahub/core/diff/merger/serializer.py +5 -29
- infrahub/core/diff/model/path.py +88 -4
- infrahub/core/diff/query/field_specifiers.py +35 -0
- infrahub/core/diff/query/roots_metadata.py +48 -0
- infrahub/core/diff/query/save.py +1 -0
- infrahub/core/diff/query_parser.py +27 -11
- infrahub/core/diff/repository/deserializer.py +7 -3
- infrahub/core/diff/repository/repository.py +100 -9
- infrahub/core/diff/tasks.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/integrity/object_conflict/conflict_recorder.py +6 -1
- infrahub/core/ipam/utilization.py +6 -1
- infrahub/core/manager.py +8 -0
- infrahub/core/merge.py +6 -1
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +1 -1
- infrahub/core/migrations/graph/m015_diff_format_update.py +1 -1
- infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +1 -1
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +101 -0
- infrahub/core/migrations/query/attribute_add.py +5 -5
- infrahub/core/migrations/schema/tasks.py +2 -2
- infrahub/core/migrations/shared.py +3 -3
- infrahub/core/node/__init__.py +8 -2
- infrahub/core/node/constraints/grouped_uniqueness.py +9 -2
- infrahub/core/query/__init__.py +5 -2
- infrahub/core/query/diff.py +32 -19
- infrahub/core/query/ipam.py +30 -22
- infrahub/core/query/node.py +91 -40
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +2 -2
- infrahub/core/schema/generated/relationship_schema.py +1 -1
- infrahub/core/schema/schema_branch_computed.py +1 -1
- infrahub/core/task/task_log.py +1 -1
- infrahub/core/validators/attribute/kind.py +1 -1
- infrahub/core/validators/interface.py +1 -2
- infrahub/core/validators/models/violation.py +1 -14
- infrahub/core/validators/shared.py +2 -2
- infrahub/core/validators/tasks.py +7 -4
- infrahub/core/validators/uniqueness/index.py +2 -4
- infrahub/database/index.py +1 -1
- infrahub/dependencies/builder/constraint/schema/aggregated.py +2 -0
- infrahub/dependencies/builder/constraint/schema/attribute_kind.py +8 -0
- infrahub/dependencies/builder/diff/data_check_synchronizer.py +2 -0
- infrahub/git/base.py +3 -3
- infrahub/git/integrator.py +1 -1
- infrahub/graphql/api/endpoints.py +12 -3
- infrahub/graphql/app.py +2 -2
- infrahub/graphql/auth/query_permission_checker/default_branch_checker.py +2 -17
- infrahub/graphql/auth/query_permission_checker/merge_operation_checker.py +1 -12
- infrahub/graphql/auth/query_permission_checker/object_permission_checker.py +6 -40
- infrahub/graphql/auth/query_permission_checker/super_admin_checker.py +5 -8
- infrahub/graphql/enums.py +2 -2
- infrahub/graphql/initialization.py +27 -8
- infrahub/graphql/manager.py +9 -3
- infrahub/graphql/models.py +6 -0
- infrahub/graphql/mutations/account.py +14 -10
- infrahub/graphql/mutations/computed_attribute.py +11 -22
- infrahub/graphql/mutations/diff.py +2 -0
- infrahub/graphql/mutations/main.py +5 -16
- infrahub/graphql/mutations/proposed_change.py +11 -20
- infrahub/graphql/mutations/resource_manager.py +6 -3
- infrahub/graphql/mutations/schema.py +8 -7
- infrahub/graphql/mutations/tasks.py +1 -1
- infrahub/graphql/permissions.py +3 -4
- infrahub/graphql/queries/account.py +2 -11
- infrahub/graphql/queries/resource_manager.py +21 -10
- infrahub/graphql/query.py +3 -1
- infrahub/graphql/resolvers/resolver.py +5 -1
- infrahub/graphql/types/task.py +14 -2
- infrahub/menu/generator.py +6 -18
- infrahub/message_bus/messages/event_node_mutated.py +2 -2
- infrahub/message_bus/operations/check/repository.py +2 -4
- infrahub/message_bus/operations/event/branch.py +2 -4
- infrahub/message_bus/operations/requests/proposed_change.py +1 -1
- infrahub/message_bus/operations/requests/repository.py +3 -5
- infrahub/message_bus/types.py +1 -1
- infrahub/permissions/__init__.py +12 -3
- infrahub/permissions/backend.py +2 -17
- infrahub/permissions/constants.py +12 -8
- infrahub/permissions/local_backend.py +5 -102
- infrahub/permissions/manager.py +135 -0
- infrahub/permissions/report.py +14 -25
- infrahub/permissions/types.py +6 -0
- infrahub/proposed_change/tasks.py +1 -1
- infrahub/task_manager/models.py +34 -5
- infrahub/task_manager/task.py +14 -6
- infrahub/visuals.py +1 -3
- infrahub_sdk/client.py +204 -43
- infrahub_sdk/ctl/cli_commands.py +106 -6
- infrahub_sdk/data.py +3 -2
- infrahub_sdk/graphql.py +5 -0
- infrahub_sdk/node.py +21 -2
- infrahub_sdk/queries.py +69 -0
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/testing/schemas/animal.py +1 -0
- infrahub_sdk/types.py +6 -0
- infrahub_sdk/utils.py +17 -0
- {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/METADATA +1 -1
- {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/RECORD +136 -131
- infrahub/core/diff/query/empty_roots.py +0 -33
- {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/WHEEL +0 -0
- {infrahub_server-1.1.1.dist-info → infrahub_server-1.1.3.dist-info}/entry_points.txt +0 -0
infrahub/core/query/diff.py
CHANGED

@@ -568,17 +568,26 @@ WITH CASE
 ELSE [[$new_node_field_specifiers, $branch_from_time], [$current_node_field_specifiers, $from_time]]
 END AS diff_filter_params_list
 UNWIND diff_filter_params_list AS diff_filter_params
+WITH diff_filter_params[0] AS node_field_specifiers_list, diff_filter_params[1] AS from_time
 CALL {
-
-
+// -------------------------------------
+// These lists contain duplicate data, but vastly improve querying speed below
+// -------------------------------------
+WITH node_field_specifiers_list
+UNWIND node_field_specifiers_list AS nfs
+WITH nfs[0] AS uuid, nfs[1] AS field_name
+WITH collect(DISTINCT uuid) as uuids, collect(DISTINCT field_name) AS field_names
+RETURN uuids AS node_ids_list, field_names AS field_names_list
+}
+CALL {
+WITH node_field_specifiers_list, node_ids_list, from_time
 CALL {
-WITH node_field_specifiers_list, from_time
-WITH reduce(node_ids = [], nfs IN node_field_specifiers_list | node_ids + [nfs[0]]) AS node_ids_list, from_time
+WITH node_field_specifiers_list, from_time, node_ids_list
 // -------------------------------------
 // Identify nodes added/removed on branch
 // -------------------------------------
 MATCH (q:Root)<-[diff_rel:IS_PART_OF {branch: $branch_name}]-(p:Node)
-WHERE (node_ids_list
+WHERE (size(node_ids_list) = 0 OR p.uuid IN node_ids_list)
 AND (from_time <= diff_rel.from < $to_time)
 AND (diff_rel.to IS NULL OR (from_time <= diff_rel.to < $to_time))
 AND p.branch_support = $branch_aware
@@ -647,20 +656,20 @@ CALL {
 }
 RETURN diff_path
 UNION
-WITH
-WITH diff_filter_params[0] AS node_field_specifiers_list, diff_filter_params[1] AS from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 CALL {
-WITH node_field_specifiers_list, from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 // -------------------------------------
 // Identify attributes/relationships added/removed on branch
 // -------------------------------------
 CALL {
-WITH node_field_specifiers_list, from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 MATCH (root:Root)<-[r_root:IS_PART_OF]-(p:Node)-[diff_rel:HAS_ATTRIBUTE {branch: $branch_name}]->(q:Attribute)
 // exclude attributes and relationships under added/removed nodes b/c they are covered above
-WHERE
-AND r_root.branch IN [$branch_name, $base_branch_name, $global_branch_name]
+WHERE r_root.branch IN [$branch_name, $base_branch_name, $global_branch_name]
 AND q.branch_support = $branch_aware
+AND (size(node_ids_list) = 0 OR p.uuid IN node_ids_list)
+AND (size(field_names_list) = 0 OR q.name IN field_names_list)
 // if p has a different type of branch support and was addded within our timeframe
 AND (r_root.from < from_time OR p.branch_support = $branch_agnostic)
 AND r_root.status = "active"
@@ -669,14 +678,16 @@ CALL {
 AND (diff_rel.to IS NULL OR (from_time <= diff_rel.to < $to_time))
 AND r_root.from <= diff_rel.from
 AND (r_root.to IS NULL OR diff_rel.branch <> r_root.branch OR r_root.to >= diff_rel.from)
+AND (node_field_specifiers_list IS NULL OR [p.uuid, q.name] IN node_field_specifiers_list)
 RETURN root, r_root, p, diff_rel, q
 UNION ALL
-WITH node_field_specifiers_list, from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 MATCH (root:Root)<-[r_root:IS_PART_OF]-(p:Node)-[diff_rel:IS_RELATED {branch: $branch_name}]-(q:Relationship)
 // exclude attributes and relationships under added/removed nodes b/c they are covered above
-WHERE
-AND r_root.branch IN [$branch_name, $base_branch_name, $global_branch_name]
+WHERE r_root.branch IN [$branch_name, $base_branch_name, $global_branch_name]
 AND q.branch_support = $branch_aware
+AND (size(node_ids_list) = 0 OR p.uuid IN node_ids_list)
+AND (size(field_names_list) = 0 OR q.name IN field_names_list)
 // if p has a different type of branch support and was addded within our timeframe
 AND (r_root.from < from_time OR p.branch_support = $branch_agnostic)
 // get attributes and relationships added on the branch during the timeframe
@@ -684,6 +695,7 @@ CALL {
 AND (diff_rel.to IS NULL OR (from_time <= diff_rel.to < $to_time))
 AND r_root.from <= diff_rel.from
 AND (r_root.to IS NULL OR diff_rel.branch <> r_root.branch OR r_root.to >= diff_rel.from)
+AND (node_field_specifiers_list IS NULL OR [p.uuid, q.name] IN node_field_specifiers_list)
 RETURN root, r_root, p, diff_rel, q
 }
 WITH root, r_root, p, diff_rel, q, from_time
@@ -762,19 +774,19 @@ CALL {
 }
 RETURN mid_diff_path AS diff_path
 UNION
-WITH
-WITH diff_filter_params[0] AS node_field_specifiers_list, diff_filter_params[1] AS from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 CALL {
-WITH node_field_specifiers_list, from_time
+WITH node_field_specifiers_list, node_ids_list, field_names_list, from_time
 // -------------------------------------
 // Identify properties added/removed on branch
 // -------------------------------------
 MATCH diff_rel_path = (root:Root)<-[r_root:IS_PART_OF]-(n:Node)-[r_node]-(p)-[diff_rel {branch: $branch_name}]->(q)
-WHERE (
-AND (
+WHERE (
 (from_time <= diff_rel.from < $to_time)
 OR (from_time <= diff_rel.to < $to_time)
 )
+AND (size(node_ids_list) = 0 OR n.uuid IN node_ids_list)
+AND (size(field_names_list) = 0 OR p.name IN field_names_list)
 // exclude attributes and relationships under added/removed nodes, attrs, and rels b/c they are covered above
 AND ALL(
 r in [r_root, r_node]
@@ -785,6 +797,7 @@ CALL {
 AND type(diff_rel) IN ["IS_VISIBLE", "IS_PROTECTED", "HAS_SOURCE", "HAS_OWNER", "HAS_VALUE"]
 AND any(l in labels(q) WHERE l in ["Boolean", "Node", "AttributeValue"])
 AND type(r_node) IN ["HAS_ATTRIBUTE", "IS_RELATED"]
+AND (node_field_specifiers_list IS NULL OR [n.uuid, p.name] IN node_field_specifiers_list)
 AND ALL(
 r_pair IN [[r_root, r_node], [r_node, diff_rel]]
 // filter out paths where a base branch edge follows a branch edge
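The new `CALL` block above materialises two de-duplicated lists (`node_ids_list`, `field_names_list`) out of the `[uuid, field_name]` specifier pairs so the later `MATCH` clauses can filter with cheap `IN` membership checks, with an empty list meaning "do not filter". A minimal Python sketch of that same de-duplication; the helper name and the sample input are illustrative and not part of the Infrahub code base:

```python
from typing import Iterable


def split_field_specifiers(specifiers: Iterable[tuple[str, str]]) -> tuple[list[str], list[str]]:
    """Mirror the new Cypher block: collect distinct node UUIDs and field names.

    Each specifier is a [uuid, field_name] pair. The diff query keeps the full
    pair list for exact matching and these two de-duplicated lists for fast
    coarse filtering; duplicates are expected in the input.
    """
    uuids: list[str] = []
    field_names: list[str] = []
    for uuid, field_name in specifiers:
        if uuid not in uuids:
            uuids.append(uuid)
        if field_name not in field_names:
            field_names.append(field_name)
    return uuids, field_names


if __name__ == "__main__":
    pairs = [("1111", "name"), ("1111", "description"), ("2222", "name")]
    node_ids_list, field_names_list = split_field_specifiers(pairs)
    print(node_ids_list)     # ['1111', '2222']
    print(field_names_list)  # ['name', 'description']
```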
infrahub/core/query/ipam.py
CHANGED

@@ -2,7 +2,7 @@ from __future__ import annotations

 import ipaddress
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Iterable
+from typing import TYPE_CHECKING, Iterable

 from infrahub.core.constants import InfrahubKind
 from infrahub.core.ipam.constants import AllIPTypes, IPAddressType, IPNetworkType
@@ -38,7 +38,7 @@ class IPAddressData:


 def _get_namespace_id(
-namespace:
+namespace: Node | str | None = None,
 ) -> str:
 if namespace and isinstance(namespace, str):
 return namespace
@@ -54,7 +54,7 @@ class IPPrefixSubnetFetch(Query):
 def __init__(
 self,
 obj: IPNetworkType,
-namespace:
+namespace: Node | str | None = None,
 **kwargs,
 ):
 self.obj = obj
@@ -97,8 +97,7 @@ class IPPrefixSubnetFetch(Query):
 AND av.binary_address STARTS WITH $prefix_binary
 AND av.prefixlen > $maxprefixlen
 AND av.version = $ip_version
-AND all(r IN relationships(path2) WHERE (%(branch_filter)s))
-// TODO Need to check for delete nodes
+AND all(r IN relationships(path2) WHERE (%(branch_filter)s) and r.status = "active")
 WITH
 collect([pfx, av]) as all_prefixes_and_value,
 collect(pfx) as all_prefixes
@@ -146,7 +145,7 @@ class IPPrefixIPAddressFetch(Query):
 def __init__(
 self,
 obj: IPNetworkType,
-namespace:
+namespace: Node | str | None = None,
 **kwargs,
 ):
 self.obj = obj
@@ -189,7 +188,7 @@ class IPPrefixIPAddressFetch(Query):
 AND av.binary_address STARTS WITH $prefix_binary
 AND av.prefixlen >= $maxprefixlen
 AND av.version = $ip_version
-AND all(r IN relationships(path2) WHERE (%(branch_filter)s))
+AND all(r IN relationships(path2) WHERE (%(branch_filter)s) and r.status = "active")
 """ % {
 "ns_label": InfrahubKind.IPNAMESPACE,
 "node_label": InfrahubKind.IPADDRESS,
@@ -216,9 +215,9 @@
 async def get_subnets(
 db: InfrahubDatabase,
 ip_prefix: IPNetworkType,
-namespace:
-branch:
-at:
+namespace: Node | str | None = None,
+branch: Branch | str | None = None,
+at: Timestamp | str | None = None,
 branch_agnostic: bool = False,
 ) -> Iterable[IPPrefixData]:
 branch = await registry.get_branch(db=db, branch=branch)
@@ -232,9 +231,9 @@ async def get_subnets(
 async def get_ip_addresses(
 db: InfrahubDatabase,
 ip_prefix: IPNetworkType,
-namespace:
-branch:
-at=None,
+namespace: Node | str | None = None,
+branch: Branch | str | None = None,
+at: Timestamp | str | None = None,
 branch_agnostic: bool = False,
 ) -> Iterable[IPAddressData]:
 branch = await registry.get_branch(db=db, branch=branch)
@@ -249,8 +248,17 @@ class IPPrefixUtilization(Query):
 name = "ipprefix_utilization_prefix"
 type = QueryType.READ

-def __init__(self, ip_prefixes: list[str], **kwargs):
+def __init__(self, ip_prefixes: list[str], allocated_kinds: list[str], **kwargs):
 self.ip_prefixes = ip_prefixes
+self.allocated_kinds: list[str] = []
+self.allocated_kinds_rel: list[str] = []
+
+for kind in sorted(allocated_kinds):
+self.allocated_kinds.append(f'"{kind}"')
+self.allocated_kinds_rel.append(
+{InfrahubKind.IPADDRESS: '"ip_prefix__ip_address"', InfrahubKind.IPPREFIX: '"parent__child"'}[kind]
+)
+
 super().__init__(**kwargs)

 async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:
@@ -266,8 +274,8 @@ class IPPrefixUtilization(Query):
 CALL {{
 WITH pfx
 MATCH (pfx)-[r_rel1:IS_RELATED]-(rl:Relationship)<-[r_rel2:IS_RELATED]-(child:Node)
-WHERE rl.name IN ["
-AND any(l IN labels(child) WHERE l
+WHERE rl.name IN [{", ".join(self.allocated_kinds_rel)}]
+AND any(l IN labels(child) WHERE l IN [{", ".join(self.allocated_kinds)}])
 AND ({rel_filter("r_rel1")})
 AND ({rel_filter("r_rel2")})
 RETURN r_rel1, rl, r_rel2, child
@@ -319,8 +327,8 @@ class IPPrefixReconcileQuery(Query):
 def __init__(
 self,
 ip_value: AllIPTypes,
-namespace:
-node_uuid:
+namespace: Node | str | None = None,
+node_uuid: str | None = None,
 **kwargs,
 ):
 self.ip_value = ip_value
@@ -584,7 +592,7 @@ class IPPrefixReconcileQuery(Query):
 self.add_to_query(get_new_children_query)
 self.return_labels = ["ip_node", "current_parent", "current_children", "new_parent", "new_children"]

-def _get_uuid_from_query(self, node_name: str) ->
+def _get_uuid_from_query(self, node_name: str) -> str | None:
 results = list(self.get_results())
 if not results:
 return None
@@ -611,13 +619,13 @@ class IPPrefixReconcileQuery(Query):
 element_uuids.append(str(element_uuid))
 return element_uuids

-def get_ip_node_uuid(self) ->
+def get_ip_node_uuid(self) -> str | None:
 return self._get_uuid_from_query("ip_node")

-def get_current_parent_uuid(self) ->
+def get_current_parent_uuid(self) -> str | None:
 return self._get_uuid_from_query("current_parent")

-def get_calculated_parent_uuid(self) ->
+def get_calculated_parent_uuid(self) -> str | None:
 return self._get_uuid_from_query("new_parent")

 def get_current_children_uuids(self) -> list[str]:
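For context on the `IPPrefixUtilization` change above: the constructor now receives `allocated_kinds` and pre-renders both the quoted kind labels and the matching relationship identifiers that get spliced into the Cypher filter. A standalone sketch of that mapping; the literal kind names below stand in for `InfrahubKind.IPADDRESS` / `InfrahubKind.IPPREFIX` and are assumptions here, not values confirmed by this diff:

```python
# Hypothetical kind names used only to make the sketch self-contained.
KIND_TO_REL = {
    "BuiltinIPAddress": '"ip_prefix__ip_address"',
    "BuiltinIPPrefix": '"parent__child"',
}


def build_allocated_filters(allocated_kinds: list[str]) -> tuple[list[str], list[str]]:
    """Return quoted kind labels and relationship names, as the constructor now does."""
    kinds: list[str] = []
    rels: list[str] = []
    for kind in sorted(allocated_kinds):
        kinds.append(f'"{kind}"')
        rels.append(KIND_TO_REL[kind])
    return kinds, rels


kinds, rels = build_allocated_filters(["BuiltinIPPrefix", "BuiltinIPAddress"])
# These two lines mirror the rewritten WHERE clause in query_init.
print(f"WHERE rl.name IN [{', '.join(rels)}]")
print(f"AND any(l IN labels(child) WHERE l IN [{', '.join(kinds)}])")
```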
infrahub/core/query/node.py
CHANGED

@@ -1,19 +1,25 @@
 from __future__ import annotations

 from collections import defaultdict
+from copy import copy
 from dataclasses import dataclass
 from dataclasses import field as dataclass_field
 from enum import Enum
 from typing import TYPE_CHECKING, Any, AsyncIterator, Generator, Optional, Union

 from infrahub import config
-from infrahub.core.constants import
+from infrahub.core.constants import (
+AttributeDBNodeType,
+RelationshipDirection,
+RelationshipHierarchyDirection,
+)
 from infrahub.core.query import Query, QueryResult, QueryType
 from infrahub.core.query.subquery import build_subquery_filter, build_subquery_order
 from infrahub.core.query.utils import find_node_schema
 from infrahub.core.schema.attribute_schema import AttributeSchema
 from infrahub.core.utils import build_regex_attrs, extract_field_filters
 from infrahub.exceptions import QueryError
+from infrahub.graphql.models import OrderModel

 if TYPE_CHECKING:
 from neo4j.graph import Node as Neo4jNode
@@ -804,7 +810,12 @@ class NodeGetListQuery(Query):
 type = QueryType.READ

 def __init__(
-self,
+self,
+schema: NodeSchema,
+filters: Optional[dict] = None,
+partial_match: bool = False,
+order: OrderModel | None = None,
+**kwargs: Any,
 ) -> None:
 self.schema = schema
 self.filters = filters
@@ -812,8 +823,30 @@
 self._variables_to_track = ["n", "rb"]
 self._validate_filters()

+# Force disabling order when `limit` is 1 as it simplifies the query a lot.
+if "limit" in kwargs and kwargs["limit"] == 1:
+if order is None:
+order = OrderModel(disable=True)
+else:
+order = copy(order)
+order.disable = True
+
+self.order = order
+
 super().__init__(**kwargs)

+@property
+def has_filters(self) -> bool:
+if not self.filters or self.has_filter_by_id:
+return False
+return True
+
+@property
+def has_filter_by_id(self) -> bool:
+if self.filters and "id" in self.filters:
+return True
+return False
+
 def _validate_filters(self) -> None:
 if not self.filters:
 return
@@ -844,52 +877,74 @@ class NodeGetListQuery(Query):
 self.order_by = []

 self.return_labels = ["n.uuid", "rb.branch", f"{db.get_id_function_name()}(rb) as rb_id"]
-where_clause_elements = []

 branch_filter, branch_params = self.branch.get_query_filter_path(
 at=self.at, branch_agnostic=self.branch_agnostic
 )
 self.params.update(branch_params)

-
-
-
-
-
+# The initial subquery is used to filter out deleted nodes because we can have multiple valid results per branch
+# and we need to filter out the one that have been deleted in the branch.
+# If we are on the default branch, the subquery is not required because only one valid result is expected at a given time
+if not self.branch.is_default:
+topquery = """
+MATCH (n:%(node_kind)s)
+CALL {
+WITH n
+MATCH (root:Root)<-[r:IS_PART_OF]-(n)
+WHERE %(branch_filter)s
+RETURN r
+ORDER BY r.branch_level DESC, r.from DESC
+LIMIT 1
+}
+WITH n, r as rb
+WHERE rb.status = "active"
+""" % {"branch_filter": branch_filter, "node_kind": self.schema.kind}
+self.add_to_query(topquery)
+else:
+topquery = """
+MATCH (root:Root)<-[r:IS_PART_OF]-(n:%(node_kind)s)
 WHERE %(branch_filter)s
-
-
-
-
-
-
-""" % {"branch_filter": branch_filter, "node_kind": self.schema.kind}
-self.add_to_query(query)
-use_simple = False
-if self.filters and "id" in self.filters:
-use_simple = True
-where_clause_elements.append("n.uuid = $uuid")
+WITH n, r as rb
+WHERE rb.status = "active"
+""" % {"branch_filter": branch_filter, "node_kind": self.schema.kind}
+self.add_to_query(topquery)
+
+if self.has_filter_by_id and self.filters:
 self.params["uuid"] = self.filters["id"]
-
-
+self.add_to_query(" AND n.uuid = $uuid")
+return
+
+disable_order = not self.schema.order_by or (self.order is not None and self.order.disable)
+if not self.has_filters and disable_order:
+# Always order by uuid to guarantee pagination, see https://github.com/opsmill/infrahub/pull/4704.
 self.order_by = ["n.uuid"]
-if use_simple:
-if where_clause_elements:
-self.add_to_query(" AND " + " AND ".join(where_clause_elements))
 return

 if self.filters and "ids" in self.filters:
 self.add_to_query("AND n.uuid IN $node_ids")
 self.params["node_ids"] = self.filters["ids"]

-field_attribute_requirements = self._get_field_requirements()
+field_attribute_requirements = self._get_field_requirements(disable_order=disable_order)
 use_profiles = any(far for far in field_attribute_requirements if far.supports_profile)
 await self._add_node_filter_attributes(
 db=db, field_attribute_requirements=field_attribute_requirements, branch_filter=branch_filter
 )
-
-
-
+
+if not disable_order:
+await self._add_node_order_attributes(
+db=db, field_attribute_requirements=field_attribute_requirements, branch_filter=branch_filter
+)
+for far in field_attribute_requirements:
+if not far.is_order:
+continue
+if far.supports_profile:
+self.order_by.append(far.final_value_query_variable)
+continue
+self.order_by.append(far.node_value_query_variable)
+
+# Always order by uuid to guarantee pagination, see https://github.com/opsmill/infrahub/pull/4704.
+self.order_by.append("n.uuid")

 if use_profiles:
 await self._add_profiles_per_node_query(db=db, branch_filter=branch_filter)
@@ -899,15 +954,6 @@ class NodeGetListQuery(Query):
 await self._add_profile_rollups(field_attribute_requirements=field_attribute_requirements)

 self._add_final_filter(field_attribute_requirements=field_attribute_requirements)
-self.order_by = []
-for far in field_attribute_requirements:
-if not far.is_order:
-continue
-if far.supports_profile:
-self.order_by.append(far.final_value_query_variable)
-continue
-self.order_by.append(far.node_value_query_variable)
-self.order_by.append("n.uuid")

 async def _add_node_filter_attributes(
 self,
@@ -1154,7 +1200,7 @@ class NodeGetListQuery(Query):
 where_str = "WHERE " + " AND ".join(where_parts)
 self.add_to_query(where_str)

-def _get_field_requirements(self) -> list[FieldAttributeRequirement]:
+def _get_field_requirements(self, disable_order: bool) -> list[FieldAttributeRequirement]:
 internal_filters = ["any", "attribute", "relationship"]
 field_requirements_map: dict[tuple[str, str], FieldAttributeRequirement] = {}
 index = 1
@@ -1176,7 +1222,8 @@
 types=[FieldAttributeRequirementType.FILTER],
 )
 index += 1
-
+
+if disable_order:
 return list(field_requirements_map.values())

 for order_by_path in self.schema.order_by:
@@ -1214,12 +1261,14 @@ class NodeGetHierarchyQuery(Query):
 direction: RelationshipHierarchyDirection,
 node_schema: Union[NodeSchema, GenericSchema],
 filters: Optional[dict] = None,
+hierarchical_ordering: bool = False,
 **kwargs: Any,
 ) -> None:
 self.filters = filters or {}
 self.direction = direction
 self.node_id = node_id
 self.node_schema = node_schema
+self.hierarchical_ordering = hierarchical_ordering

 super().__init__(**kwargs)

@@ -1322,6 +1371,8 @@
 # ----------------------------------------------------------------------------
 # ORDER Results
 # ----------------------------------------------------------------------------
+if self.hierarchical_ordering:
+return
 if hasattr(hierarchy_schema, "order_by") and hierarchy_schema.order_by:
 order_cnt = 1

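The `NodeGetListQuery` changes above add an `order` parameter and force-disable ordering when `limit` is 1, copying the incoming model rather than mutating the caller's instance. A minimal, self-contained sketch of that branch, using a stand-in dataclass in place of `infrahub.graphql.models.OrderModel`:

```python
from copy import copy
from dataclasses import dataclass
from typing import Optional


@dataclass
class OrderModel:  # stand-in for infrahub.graphql.models.OrderModel
    disable: bool = False


def resolve_order(order: Optional[OrderModel], limit: Optional[int]) -> Optional[OrderModel]:
    """Force-disable ordering when only one result is requested.

    Copying the model keeps the caller's instance untouched, which matters when
    the same OrderModel is reused across several queries.
    """
    if limit == 1:
        if order is None:
            return OrderModel(disable=True)
        order = copy(order)
        order.disable = True
    return order


print(resolve_order(None, limit=1))           # OrderModel(disable=True)
print(resolve_order(OrderModel(), limit=50))  # OrderModel(disable=False)
```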

infrahub/core/schema/generated/attribute_schema.py
CHANGED

@@ -8,8 +8,8 @@ from pydantic import Field

 from infrahub.core.constants import AllowOverrideType, HashableModelState
 from infrahub.core.models import HashableModel
-from infrahub.core.schema.computed_attribute import ComputedAttribute # noqa:
-from infrahub.core.schema.dropdown import DropdownChoice # noqa:
+from infrahub.core.schema.computed_attribute import ComputedAttribute # noqa: TC001
+from infrahub.core.schema.dropdown import DropdownChoice # noqa: TC001

 if TYPE_CHECKING:
 from infrahub.core.constants import BranchSupportType
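The `# noqa:` comments in this and the following modules now name the exact rule being silenced: TC001 is the flake8-type-checking / Ruff rule that asks for application imports to be moved under `if TYPE_CHECKING:`. These imports stay at module level because pydantic must resolve the annotations at runtime. A small, self-contained illustration of the pattern; the stdlib import below would technically trigger the sibling rule TC003 rather than TC001 and is used only to keep the sketch runnable:

```python
from __future__ import annotations

from decimal import Decimal  # noqa: TC003 - runtime import: pydantic resolves this annotation
from typing import TYPE_CHECKING

from pydantic import BaseModel

if TYPE_CHECKING:
    # Imports that only type checkers need can safely live here.
    from collections.abc import Sequence


class Price(BaseModel):
    amount: Decimal
    currency: str = "USD"


def total(prices: Sequence[Price]) -> Decimal:
    # `Sequence` appears only in annotations, so the TYPE_CHECKING import suffices.
    return sum((p.amount for p in prices), Decimal("0"))


print(total([Price(amount="3.50"), Price(amount="1.25")]))  # 4.75
```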

infrahub/core/schema/generated/base_node_schema.py
CHANGED

@@ -8,8 +8,8 @@ from pydantic import Field

 from infrahub.core.constants import BranchSupportType, HashableModelState
 from infrahub.core.models import HashableModel
-from infrahub.core.schema.attribute_schema import AttributeSchema # noqa:
-from infrahub.core.schema.relationship_schema import RelationshipSchema # noqa:
+from infrahub.core.schema.attribute_schema import AttributeSchema # noqa: TC001
+from infrahub.core.schema.relationship_schema import RelationshipSchema # noqa: TC001


 class GeneratedBaseNodeSchema(HashableModel):

infrahub/core/schema/schema_branch_computed.py
CHANGED

@@ -6,7 +6,7 @@ from typing import TYPE_CHECKING

 from pydantic import BaseModel, Field

-from infrahub.core.schema import AttributeSchema # noqa:
+from infrahub.core.schema import AttributeSchema # noqa: TC001

 if TYPE_CHECKING:
 from infrahub.core.schema import NodeSchema, SchemaAttributePath
infrahub/core/task/task_log.py
CHANGED

@@ -4,7 +4,7 @@ from typing import TYPE_CHECKING

 from pydantic import ConfigDict, Field

-from infrahub.core.constants import Severity # noqa:
+from infrahub.core.constants import Severity # noqa: TC001
 from infrahub.core.node.standard import StandardNode
 from infrahub.core.query.task_log import TaskLogNodeCreateQuery
 from infrahub.core.timestamp import current_timestamp

infrahub/core/validators/attribute/kind.py
CHANGED

@@ -67,7 +67,7 @@ class AttributeKindUpdateValidatorQuery(AttributeSchemaValidatorQuery):
 if value in (None, NULL_VALUE):
 continue
 try:
-infrahub_attribute_class.
+infrahub_attribute_class.validate_format(
 value=result.get("attribute_value"), name=self.attribute_schema.name, schema=self.attribute_schema
 )
 except ValidationError:
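The one-line fix above restores the call to `validate_format` inside the kind validator's try/except. A self-contained sketch of that validate-and-collect pattern; the attribute class, exception, and result rows below are stand-ins, not Infrahub code:

```python
class ValidationError(Exception):
    pass


class String:
    @classmethod
    def validate_format(cls, value, name, schema=None):
        # Stand-in for the attribute class's format validator.
        if not isinstance(value, str):
            raise ValidationError(f"{name}: {value!r} is not a string")


NULL_VALUE = "NULL"
results = [{"attribute_value": "leaf01"}, {"attribute_value": 42}, {"attribute_value": NULL_VALUE}]

violations = []
for result in results:
    value = result["attribute_value"]
    if value in (None, NULL_VALUE):
        continue  # null values are skipped, as in the validator
    try:
        String.validate_format(value=value, name="name", schema=None)
    except ValidationError as exc:
        violations.append(str(exc))  # failures are collected instead of aborting

print(violations)  # ["name: 42 is not a string"]
```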

infrahub/core/validators/interface.py
CHANGED

@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from typing import List

 from infrahub.core.path import GroupedDataPaths

@@ -15,4 +14,4 @@ class ConstraintCheckerInterface(ABC):
 def supports(self, request: SchemaConstraintValidatorRequest) -> bool: ...

 @abstractmethod
-async def check(self, request: SchemaConstraintValidatorRequest) ->
+async def check(self, request: SchemaConstraintValidatorRequest) -> list[GroupedDataPaths]: ...

infrahub/core/validators/models/violation.py
CHANGED

@@ -1,17 +1,4 @@
-from
-
-from pydantic import BaseModel, Field
-
-from infrahub.core.branch import Branch
-from infrahub.core.path import SchemaPath
-from infrahub.core.schema import GenericSchema, NodeSchema
-
-
-class SchemaConstraintValidatorRequest(BaseModel):
-branch: Branch = Field(..., description="The name of the branch to target")
-constraint_name: str = Field(..., description="The name of the constraint to validate")
-node_schema: Union[NodeSchema, GenericSchema] = Field(..., description="Schema of Node or Generic to validate")
-schema_path: SchemaPath = Field(..., description="SchemaPath to the element of the schema to validate")
+from pydantic import BaseModel


 class SchemaViolation(BaseModel):

infrahub/core/validators/shared.py
CHANGED

@@ -2,9 +2,9 @@ from __future__ import annotations

 from typing import Any, Union

-from infrahub.core.path import GroupedDataPaths, SchemaPath # noqa:
+from infrahub.core.path import GroupedDataPaths, SchemaPath # noqa: TC001
 from infrahub.core.query import Query, QueryType
-from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema, RelationshipSchema # noqa:
+from infrahub.core.schema import AttributeSchema, GenericSchema, NodeSchema, RelationshipSchema # noqa: TC001


 class SchemaValidatorQuery(Query):

infrahub/core/validators/tasks.py
CHANGED

@@ -5,9 +5,9 @@ from prefect import flow, task
 from prefect.cache_policies import NONE
 from prefect.logging import get_run_logger

-from infrahub.core.branch import Branch # noqa:
-from infrahub.core.path import SchemaPath # noqa:
-from infrahub.core.schema import GenericSchema, NodeSchema # noqa:
+from infrahub.core.branch import Branch # noqa: TC001
+from infrahub.core.path import SchemaPath # noqa: TC001
+from infrahub.core.schema import GenericSchema, NodeSchema # noqa: TC001
 from infrahub.core.validators.aggregated_checker import AggregatedConstraintChecker
 from infrahub.core.validators.model import (
 SchemaConstraintValidatorRequest,
@@ -32,11 +32,14 @@ async def schema_validate_migrations(message: SchemaValidateMigrationData) -> li
 log.info(f"{len(message.constraints)} constraint(s) to validate")
 # NOTE this task is a good candidate to add a progress bar
 for constraint in message.constraints:
+schema = message.schema_branch.get(name=constraint.path.schema_kind)
+if not isinstance(schema, (GenericSchema, NodeSchema)):
+continue
 batch.add(
 task=schema_path_validate,
 branch=message.branch,
 constraint_name=constraint.constraint_name,
-node_schema=
+node_schema=schema,
 schema_path=constraint.path,
 )

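The guard added to `schema_validate_migrations` above skips constraints whose schema kind does not resolve to a `NodeSchema` or `GenericSchema`. A minimal sketch of that early-continue, with stand-in classes and a plain dict in place of the real schema branch:

```python
class NodeSchema: ...
class GenericSchema: ...
class OtherSchema: ...  # anything that is not a NodeSchema/GenericSchema


# Hypothetical schema branch contents, keyed by kind name.
schema_branch = {"InfraDevice": NodeSchema(), "InfraDeviceOther": OtherSchema()}
constraint_kinds = ["InfraDevice", "InfraDeviceOther"]

to_validate = []
for kind in constraint_kinds:
    schema = schema_branch[kind]
    if not isinstance(schema, (GenericSchema, NodeSchema)):
        continue  # same early-continue the task now performs
    to_validate.append(kind)

print(to_validate)  # ['InfraDevice']
```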

infrahub/core/validators/uniqueness/index.py
CHANGED

@@ -3,8 +3,6 @@ from __future__ import annotations
 from collections import defaultdict
 from typing import TYPE_CHECKING, Any, Iterable, Optional

-from infrahub.core.constants import NULL_VALUE
-
 if TYPE_CHECKING:
 from infrahub.core.query import QueryResult
 from infrahub.core.schema import SchemaAttributePath, SchemaAttributePathValue
@@ -49,13 +47,13 @@ class UniquenessQueryResultsIndex:
 if relationship_identifier:
 if relationship_identifier not in self._relationship_index:
 self._relationship_index[relationship_identifier] = defaultdict(set)
-if attr_value and
+if attr_value and node_id:
 self._relationship_index[relationship_identifier][attr_value].add(node_id)
 self._node_index[node_id][relationship_identifier] = attr_value
 elif attr_name:
 if attr_name not in self._attribute_index:
 self._attribute_index[attr_name] = defaultdict(set)
-if attr_value and
+if attr_value and node_id:
 self._attribute_index[attr_name][attr_value].add(node_id)
 self._node_index[node_id][attr_name] = attr_value

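The uniqueness-index change above tightens the guard so an entry is only recorded when both the attribute value and the node id are present. A simplified sketch of that guarded update; plain dicts stand in for the real `UniquenessQueryResultsIndex` internals:

```python
from collections import defaultdict

attribute_index: dict[str, dict[str, set[str]]] = {}
node_index: dict[str, dict[str, str]] = defaultdict(dict)


def add_result(node_id: str | None, attr_name: str, attr_value: str | None) -> None:
    if attr_name not in attribute_index:
        attribute_index[attr_name] = defaultdict(set)
    if attr_value and node_id:  # the guard as it reads in 1.1.3
        attribute_index[attr_name][attr_value].add(node_id)
        node_index[node_id][attr_name] = attr_value


add_result("node-1", "name", "leaf01")
add_result("node-2", "name", None)    # ignored: no value
add_result(None, "name", "leaf01")    # ignored: no node id

print(dict(attribute_index["name"]))  # {'leaf01': {'node-1'}}
```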