infrahub-server 1.2.7__py3-none-any.whl → 1.2.9rc0__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- infrahub/api/transformation.py +1 -0
- infrahub/artifacts/models.py +4 -0
- infrahub/cli/db.py +15 -6
- infrahub/computed_attribute/tasks.py +1 -0
- infrahub/config.py +2 -1
- infrahub/constants/__init__.py +0 -0
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/graph/index.py +3 -1
- infrahub/core/initialization.py +23 -7
- infrahub/core/manager.py +16 -5
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +9 -8
- infrahub/core/protocols.py +1 -0
- infrahub/core/query/node.py +96 -29
- infrahub/core/schema/definitions/core/builtin.py +2 -4
- infrahub/core/schema/definitions/core/transform.py +1 -0
- infrahub/core/validators/aggregated_checker.py +2 -2
- infrahub/core/validators/uniqueness/query.py +30 -9
- infrahub/database/__init__.py +1 -16
- infrahub/database/index.py +1 -1
- infrahub/database/memgraph.py +1 -12
- infrahub/database/neo4j.py +1 -13
- infrahub/git/integrator.py +27 -3
- infrahub/git/models.py +4 -0
- infrahub/git/tasks.py +3 -0
- infrahub/git_credential/helper.py +2 -2
- infrahub/graphql/mutations/computed_attribute.py +5 -1
- infrahub/message_bus/operations/requests/proposed_change.py +6 -0
- infrahub/message_bus/types.py +3 -0
- infrahub/patch/queries/consolidate_duplicated_nodes.py +109 -0
- infrahub/patch/queries/delete_duplicated_edges.py +138 -0
- infrahub/proposed_change/tasks.py +1 -0
- infrahub/server.py +1 -3
- infrahub/transformations/models.py +3 -0
- infrahub/transformations/tasks.py +1 -0
- infrahub/webhook/models.py +3 -0
- infrahub_sdk/client.py +4 -4
- infrahub_sdk/config.py +17 -0
- infrahub_sdk/ctl/cli_commands.py +7 -1
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/generator.py +12 -66
- infrahub_sdk/operation.py +80 -0
- infrahub_sdk/protocols.py +12 -0
- infrahub_sdk/recorder.py +3 -0
- infrahub_sdk/schema/repository.py +4 -0
- infrahub_sdk/transforms.py +15 -27
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9rc0.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9rc0.dist-info}/RECORD +53 -50
- infrahub_testcontainers/container.py +1 -0
- infrahub_testcontainers/docker-compose.test.yml +4 -0
- infrahub/database/manager.py +0 -15
- /infrahub/{database/constants.py → constants/database.py} +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9rc0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9rc0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9rc0.dist-info}/entry_points.txt +0 -0
infrahub/api/transformation.py
CHANGED
@@ -88,6 +88,7 @@ async def transform_python(
 branch=branch_params.branch.name,
 transform_location=f"{transform.file_path.value}::{transform.class_name.value}",
 timeout=transform.timeout.value,
+convert_query_response=transform.convert_query_response.value or False,
 data=data,
 )
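The `convert_query_response` flag that appears here (and in the artifact and transform models below) controls whether a Python transform works on the raw GraphQL payload or on node objects built from it. A conceptual sketch of the difference, with a hypothetical `to_nodes()` helper standing in for the SDK-side conversion:

```python
from typing import Any

# Raw GraphQL payload a transform receives when convert_query_response is False.
raw_response: dict[str, Any] = {
    "BuiltinTag": {"edges": [{"node": {"id": "17d8cbf0", "name": {"value": "blue"}}}]}
}


class NodeSketch:
    """Hypothetical stand-in for the SDK's InfrahubNode wrapper."""

    def __init__(self, data: dict[str, Any]) -> None:
        self.id = data["id"]
        self.name = data["name"]["value"]


def to_nodes(response: dict[str, Any]) -> list[NodeSketch]:
    # Hypothetical conversion: flatten each kind's edges into node objects.
    return [NodeSketch(edge["node"]) for kind in response.values() for edge in kind["edges"]]


# convert_query_response=True means the transform sees objects like these
# instead of navigating the nested dict by hand.
assert to_nodes(raw_response)[0].name == "blue"
```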
infrahub/artifacts/models.py
CHANGED
@@ -12,6 +12,10 @@ class CheckArtifactCreate(BaseModel):
 content_type: str = Field(..., description="Content type of the artifact")
 transform_type: str = Field(..., description="The type of transform associated with this artifact")
 transform_location: str = Field(..., description="The transforms location within the repository")
+convert_query_response: bool = Field(
+default=False,
+description="Indicate if the query response should be converted to InfrahubNode objects for Python transforms",
+)
 repository_id: str = Field(..., description="The unique ID of the Repository")
 repository_name: str = Field(..., description="The name of the Repository")
 repository_kind: str = Field(..., description="The kind of the Repository")
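The new field follows the usual optional-Pydantic-field pattern, so producers of `CheckArtifactCreate` messages that predate the flag keep the old behaviour. A minimal sketch with a stripped-down stand-in model (the real model has more required fields than shown here):

```python
from pydantic import BaseModel, Field


class ArtifactCreateSketch(BaseModel):
    """Stripped-down stand-in for CheckArtifactCreate, for illustration only."""

    transform_location: str = Field(..., description="The transforms location within the repository")
    convert_query_response: bool = Field(
        default=False,
        description="Convert the query response to InfrahubNode objects for Python transforms",
    )


legacy = ArtifactCreateSketch(transform_location="transforms/config.py::DeviceConfig")
assert legacy.convert_query_response is False  # older producers are unaffected

opt_in = ArtifactCreateSketch(
    transform_location="transforms/config.py::DeviceConfig",
    convert_query_response=True,
)
assert opt_in.convert_query_response is True
```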
infrahub/cli/db.py
CHANGED
@@ -23,7 +23,7 @@ from infrahub import config
 from infrahub.core import registry
 from infrahub.core.graph import GRAPH_VERSION
 from infrahub.core.graph.constraints import ConstraintManagerBase, ConstraintManagerMemgraph, ConstraintManagerNeo4j
-from infrahub.core.graph.index import node_indexes, rel_indexes
+from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
 from infrahub.core.graph.schema import (
 GRAPH_SCHEMA,
 GraphAttributeProperties,
@@ -48,6 +48,8 @@ from infrahub.core.utils import delete_all_nodes
 from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
 from infrahub.core.validators.tasks import schema_validate_migrations
 from infrahub.database import DatabaseType
+from infrahub.database.memgraph import IndexManagerMemgraph
+from infrahub.database.neo4j import IndexManagerNeo4j
 from infrahub.log import get_logger
 from infrahub.services import InfrahubServices
 from infrahub.services.adapters.message_bus.local import BusSimulator
@@ -59,6 +61,7 @@ from .patch import patch_app
 if TYPE_CHECKING:
 from infrahub.cli.context import CliContext
 from infrahub.database import InfrahubDatabase
+from infrahub.database.index import IndexManagerBase

 app = AsyncTyper()
 app.add_typer(patch_app, name="patch")
@@ -249,14 +252,20 @@ async def index(

 context: CliContext = ctx.obj
 dbdriver = await context.init_db(retry=1)
-dbdriver.
+if dbdriver.db_type is DatabaseType.MEMGRAPH:
+index_manager: IndexManagerBase = IndexManagerMemgraph(db=dbdriver)
+index_manager = IndexManagerNeo4j(db=dbdriver)
+
+if config.SETTINGS.experimental_features.value_db_index:
+node_indexes.append(attr_value_index)
+index_manager.init(nodes=node_indexes, rels=rel_indexes)

 if action == IndexAction.ADD:
-await
+await index_manager.add()
 elif action == IndexAction.DROP:
-await
+await index_manager.drop()

-indexes = await
+indexes = await index_manager.list()

 console = Console()
@@ -412,7 +421,7 @@ async def update_core_schema(
 update_db=True,
 )
 default_branch.update_schema_hash()
-rprint("The Core Schema has been updated")
+rprint("The Core Schema has been updated, make sure to rebase any open branches after the upgrade")
 if debug:
 rprint(f"New schema hash: {default_branch.active_schema_hash.main}")
 await default_branch.save(db=dbt)
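The `index` command no longer goes through the removed `db.manager` facade; it picks a backend-specific index manager and registers the new AttributeValue index only when the experimental flag is set. A simplified sketch of that selection logic with stub classes (the explicit `else` branch for Neo4j is an assumption here, since the rendered diff truncates that spot):

```python
from enum import Enum


class DatabaseType(Enum):
    NEO4J = "neo4j"
    MEMGRAPH = "memgraph"


class IndexManagerNeo4jStub:
    """Stand-in for IndexManagerNeo4j."""

    def init(self, nodes: list[str], rels: list[str]) -> None:
        self.nodes, self.rels = list(nodes), list(rels)


class IndexManagerMemgraphStub(IndexManagerNeo4jStub):
    """Stand-in for IndexManagerMemgraph."""


def build_index_manager(db_type: DatabaseType, value_db_index: bool) -> IndexManagerNeo4jStub:
    # Backend dispatch: Memgraph gets its own manager; Neo4j is assumed to be the fallback branch.
    if db_type is DatabaseType.MEMGRAPH:
        manager: IndexManagerNeo4jStub = IndexManagerMemgraphStub()
    else:
        manager = IndexManagerNeo4jStub()
    node_indexes = ["node_uuid", "diff_node_uuid"]  # illustrative index names
    if value_db_index:  # mirrors config.SETTINGS.experimental_features.value_db_index
        node_indexes.append("attr_value")  # the new AttributeValue index
    manager.init(nodes=node_indexes, rels=["attr_from", "attr_branch"])
    return manager


manager = build_index_manager(DatabaseType.NEO4J, value_db_index=True)
assert "attr_value" in manager.nodes
```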
infrahub/computed_attribute/tasks.py
CHANGED
@@ -113,6 +113,7 @@ async def process_transform(
 location=f"{transform.file_path.value}::{transform.class_name.value}",
 data=data,
 client=service.client,
+convert_query_response=transform.convert_query_response.value,
 ) # type: ignore[misc]

 await service.client.execute_graphql(
infrahub/config.py
CHANGED
@@ -23,7 +23,7 @@ from pydantic import (
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self

-from infrahub.database
+from infrahub.constants.database import DatabaseType
 from infrahub.exceptions import InitializationError, ProcessingError

 if TYPE_CHECKING:
@@ -629,6 +629,7 @@ class AnalyticsSettings(BaseSettings):
 class ExperimentalFeaturesSettings(BaseSettings):
 model_config = SettingsConfigDict(env_prefix="INFRAHUB_EXPERIMENTAL_")
 graphql_enums: bool = False
+value_db_index: bool = False


 class SecuritySettings(BaseSettings):
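With `env_prefix="INFRAHUB_EXPERIMENTAL_"`, pydantic-settings maps the new field to the `INFRAHUB_EXPERIMENTAL_VALUE_DB_INDEX` environment variable. A self-contained sketch of that mapping, mirroring the settings class shown above without importing Infrahub itself:

```python
import os

from pydantic_settings import BaseSettings, SettingsConfigDict


class ExperimentalFeaturesSketch(BaseSettings):
    """Mirror of ExperimentalFeaturesSettings from the diff, for illustration."""

    model_config = SettingsConfigDict(env_prefix="INFRAHUB_EXPERIMENTAL_")
    graphql_enums: bool = False
    value_db_index: bool = False


os.environ["INFRAHUB_EXPERIMENTAL_VALUE_DB_INDEX"] = "true"
assert ExperimentalFeaturesSketch().value_db_index is True  # flag picked up from the environment
```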
infrahub/constants/__init__.py
File without changes

infrahub/core/constants/__init__.py
CHANGED
@@ -150,6 +150,7 @@ class ContentType(InfrahubStringEnum):
 APPLICATION_JSON = "application/json"
 APPLICATION_YAML = "application/yaml"
 APPLICATION_XML = "application/xml"
+APPLICATION_HCL = "application/hcl"
 TEXT_PLAIN = "text/plain"
 TEXT_MARKDOWN = "text/markdown"
 TEXT_CSV = "text/csv"
infrahub/core/graph/index.py
CHANGED
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from infrahub.database
+from infrahub.constants.database import IndexType
 from infrahub.database.index import IndexItem

 node_indexes: list[IndexItem] = [
@@ -17,6 +17,8 @@ node_indexes: list[IndexItem] = [
 IndexItem(name="diff_node_uuid", label="DiffNode", properties=["uuid"], type=IndexType.TEXT),
 ]

+attr_value_index = IndexItem(name="attr_value", label="AttributeValue", properties=["value"], type=IndexType.RANGE)
+
 rel_indexes: list[IndexItem] = [
 IndexItem(name="attr_from", label="HAS_ATTRIBUTE", properties=["from"], type=IndexType.RANGE),
 IndexItem(name="attr_branch", label="HAS_ATTRIBUTE", properties=["branch"], type=IndexType.RANGE),
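On Neo4j, a RANGE `IndexItem` like `attr_value` corresponds to a statement along the lines of `CREATE RANGE INDEX ... FOR (n:AttributeValue) ON (n.value)`; the exact Cypher emitted by `IndexManagerNeo4j` may differ. A small sketch that renders such a statement from the item's fields:

```python
from dataclasses import dataclass


@dataclass
class IndexItemSketch:
    """Minimal stand-in for infrahub.database.index.IndexItem."""

    name: str
    label: str
    properties: list[str]


def render_neo4j_range_index(item: IndexItemSketch) -> str:
    # Hypothetical rendering of a Neo4j range index, shown for illustration only.
    props = ", ".join(f"n.{prop}" for prop in item.properties)
    return f"CREATE RANGE INDEX {item.name} IF NOT EXISTS FOR (n:{item.label}) ON ({props})"


attr_value = IndexItemSketch(name="attr_value", label="AttributeValue", properties=["value"])
print(render_neo4j_range_index(attr_value))
# CREATE RANGE INDEX attr_value IF NOT EXISTS FOR (n:AttributeValue) ON (n.value)
```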
infrahub/core/initialization.py
CHANGED
@@ -1,7 +1,9 @@
 import importlib
+from typing import TYPE_CHECKING
 from uuid import uuid4

 from infrahub import config, lock
+from infrahub.constants.database import DatabaseType
 from infrahub.core import registry
 from infrahub.core.branch import Branch
 from infrahub.core.constants import (
@@ -13,6 +15,7 @@ from infrahub.core.constants import (
 PermissionDecision,
 )
 from infrahub.core.graph import GRAPH_VERSION
+from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
 from infrahub.core.manager import NodeManager
 from infrahub.core.node import Node
 from infrahub.core.node.ipam import BuiltinIPPrefix
@@ -25,6 +28,8 @@ from infrahub.core.root import Root
 from infrahub.core.schema import SchemaRoot, core_models, internal_schema
 from infrahub.core.schema.manager import SchemaManager
 from infrahub.database import InfrahubDatabase
+from infrahub.database.memgraph import IndexManagerMemgraph
+from infrahub.database.neo4j import IndexManagerNeo4j
 from infrahub.exceptions import DatabaseError
 from infrahub.graphql.manager import GraphQLSchemaManager
 from infrahub.log import get_logger
@@ -32,6 +37,9 @@ from infrahub.menu.utils import create_default_menu
 from infrahub.permissions import PermissionBackend
 from infrahub.storage import InfrahubObjectStorage

+if TYPE_CHECKING:
+from infrahub.database.index import IndexManagerBase
+
 log = get_logger()


@@ -115,7 +123,19 @@ async def initialize_registry(db: InfrahubDatabase, initialize: bool = False) ->
 registry.permission_backends = initialize_permission_backends()


-async def
+async def add_indexes(db: InfrahubDatabase) -> None:
+if db.db_type is DatabaseType.MEMGRAPH:
+index_manager: IndexManagerBase = IndexManagerMemgraph(db=db)
+index_manager = IndexManagerNeo4j(db=db)
+
+if config.SETTINGS.experimental_features.value_db_index:
+node_indexes.append(attr_value_index)
+index_manager.init(nodes=node_indexes, rels=rel_indexes)
+log.debug("Loading database indexes ..")
+await index_manager.add()
+
+
+async def initialization(db: InfrahubDatabase, add_database_indexes: bool = False) -> None:
 if config.SETTINGS.database.db_type == config.DatabaseType.MEMGRAPH:
 session = await db.session()
 await session.run(query="SET DATABASE SETTING 'log.level' TO 'INFO'")
@@ -129,12 +149,8 @@ async def initialization(db: InfrahubDatabase) -> None:
 log.debug("Checking Root Node")
 await initialize_registry(db=db, initialize=True)

-
-
-log.debug("Loading database indexes ..")
-await db.manager.index.add()
-else:
-log.warning("The database index manager hasn't been initialized.")
+if add_database_indexes:
+await add_indexes(db=db)

 # ---------------------------------------------------
 # Load all schema in the database into the registry
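Index creation is no longer an implicit side effect of `initialization()`; callers opt in through the new `add_database_indexes` parameter, which delegates to `add_indexes()`. A simplified sketch of that call pattern with the bodies stubbed out:

```python
import asyncio


async def add_indexes(db: object) -> None:
    # Stand-in for the real helper: choose IndexManagerMemgraph or IndexManagerNeo4j,
    # optionally append attr_value_index, then create the indexes.
    print(f"creating indexes on {db}")


async def initialization(db: object, add_database_indexes: bool = False) -> None:
    # ... root node, registry and schema loading elided ...
    if add_database_indexes:
        await add_indexes(db=db)


# Callers that want indexes created at startup opt in explicitly;
# everyone else keeps the lighter default.
asyncio.run(initialization(db="driver", add_database_indexes=True))
```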
infrahub/core/manager.py
CHANGED
@@ -1229,20 +1229,31 @@ class NodeManager:
 if not prefetch_relationships and not fields:
 return
 cardinality_one_identifiers_by_kind: dict[str, dict[str, RelationshipDirection]] | None = None
-
+outbound_identifiers: set[str] | None = None
+inbound_identifiers: set[str] | None = None
+bidirectional_identifiers: set[str] | None = None
 if not prefetch_relationships:
 cardinality_one_identifiers_by_kind = _get_cardinality_one_identifiers_by_kind(
 nodes=nodes_by_id.values(), fields=fields or {}
 )
-
+outbound_identifiers = set()
+inbound_identifiers = set()
+bidirectional_identifiers = set()
 for identifier_direction_map in cardinality_one_identifiers_by_kind.values():
-
-
+for identifier, direction in identifier_direction_map.items():
+if direction is RelationshipDirection.OUTBOUND:
+outbound_identifiers.add(identifier)
+elif direction is RelationshipDirection.INBOUND:
+inbound_identifiers.add(identifier)
+elif direction is RelationshipDirection.BIDIR:
+bidirectional_identifiers.add(identifier)

 query = await NodeListGetRelationshipsQuery.init(
 db=db,
 ids=list(nodes_by_id.keys()),
-
+outbound_identifiers=None if outbound_identifiers is None else list(outbound_identifiers),
+inbound_identifiers=None if inbound_identifiers is None else list(inbound_identifiers),
+bidirectional_identifiers=None if bidirectional_identifiers is None else list(bidirectional_identifiers),
 branch=branch,
 at=at,
 branch_agnostic=branch_agnostic,
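The prefetch path now buckets cardinality-one relationship identifiers by direction before building the query, so each branch of `NodeListGetRelationshipsQuery` only has to match the identifiers that can occur in that direction. A rough sketch of the grouping, with a stand-in enum and illustrative identifiers:

```python
from enum import Enum


class RelationshipDirection(Enum):
    OUTBOUND = "outbound"
    INBOUND = "inbound"
    BIDIR = "bidirectional"


def split_by_direction(identifier_direction_map: dict[str, RelationshipDirection]) -> dict[str, set[str]]:
    # Mirrors the loop added to NodeManager: one set per direction, later handed to the
    # query as three separate parameter lists (or None when no filtering is requested).
    buckets: dict[str, set[str]] = {d.value: set() for d in RelationshipDirection}
    for identifier, direction in identifier_direction_map.items():
        buckets[direction.value].add(identifier)
    return buckets


print(split_by_direction({
    "device__interface": RelationshipDirection.OUTBOUND,  # illustrative identifiers
    "device__site": RelationshipDirection.INBOUND,
}))
```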
infrahub/core/migrations/graph/m014_remove_index_attr_value.py
CHANGED
@@ -2,11 +2,12 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, Sequence

+from infrahub.constants.database import IndexType
 from infrahub.core.migrations.shared import MigrationResult
 from infrahub.core.query import Query # noqa: TC001
 from infrahub.database import DatabaseType
-from infrahub.database.constants import IndexType
 from infrahub.database.index import IndexItem
+from infrahub.database.neo4j import IndexManagerNeo4j

 from ..shared import GraphMigration

@@ -29,13 +30,13 @@ class Migration014(GraphMigration):
 if db.db_type != DatabaseType.NEO4J:
 return result

-
-
-
-
-
-
-
+try:
+index_manager = IndexManagerNeo4j(db=db)
+index_manager.init(nodes=[INDEX_TO_DELETE], rels=[])
+await index_manager.drop()
+except Exception as exc:
+result.errors.append(str(exc))
+return result

 return result

infrahub/core/protocols.py
CHANGED
infrahub/core/query/node.py
CHANGED
@@ -649,51 +649,118 @@ class NodeListGetRelationshipsQuery(Query):
 type: QueryType = QueryType.READ
 insert_return: bool = False

-def __init__(
+def __init__(
+self,
+ids: list[str],
+outbound_identifiers: list[str] | None = None,
+inbound_identifiers: list[str] | None = None,
+bidirectional_identifiers: list[str] | None = None,
+**kwargs,
+):
 self.ids = ids
-self.
+self.outbound_identifiers = outbound_identifiers
+self.inbound_identifiers = inbound_identifiers
+self.bidirectional_identifiers = bidirectional_identifiers
 super().__init__(**kwargs)

 async def query_init(self, db: InfrahubDatabase, **kwargs) -> None: # noqa: ARG002
 self.params["ids"] = self.ids
-self.params["
+self.params["outbound_identifiers"] = self.outbound_identifiers
+self.params["inbound_identifiers"] = self.inbound_identifiers
+self.params["bidirectional_identifiers"] = self.bidirectional_identifiers

 rels_filter, rels_params = self.branch.get_query_filter_path(at=self.at, branch_agnostic=self.branch_agnostic)
 self.params.update(rels_params)

 query = """
 MATCH (n:Node) WHERE n.uuid IN $ids
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+CALL {
+WITH n
+MATCH (n)<-[:IS_RELATED]-(rel:Relationship)<-[:IS_RELATED]-(peer)
+WHERE ($inbound_identifiers IS NULL OR rel.name in $inbound_identifiers)
+AND n.uuid <> peer.uuid
+WITH DISTINCT n, rel, peer
+CALL {
+WITH n, rel, peer
+MATCH (n)<-[r:IS_RELATED]-(rel)
+WHERE (%(filters)s)
+WITH n, rel, peer, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH n, rel, peer, r AS r1
+WHERE r1.status = "active"
+MATCH (rel)<-[r:IS_RELATED]-(peer)
+WHERE (%(filters)s)
+WITH r1, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH r1, r AS r2
+WHERE r2.status = "active"
+RETURN 1 AS is_active
+}
+RETURN n.uuid AS n_uuid, rel.name AS rel_name, peer.uuid AS peer_uuid, "inbound" as direction
+UNION
+WITH n
+MATCH (n)-[:IS_RELATED]->(rel:Relationship)-[:IS_RELATED]->(peer)
+WHERE ($outbound_identifiers IS NULL OR rel.name in $outbound_identifiers)
+AND n.uuid <> peer.uuid
+WITH DISTINCT n, rel, peer
+CALL {
+WITH n, rel, peer
+MATCH (n)-[r:IS_RELATED]->(rel)
+WHERE (%(filters)s)
+WITH n, rel, peer, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH n, rel, peer, r AS r1
+WHERE r1.status = "active"
+MATCH (rel)-[r:IS_RELATED]->(peer)
+WHERE (%(filters)s)
+WITH r1, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH r1, r AS r2
+WHERE r2.status = "active"
+RETURN 1 AS is_active
+}
+RETURN n.uuid AS n_uuid, rel.name AS rel_name, peer.uuid AS peer_uuid, "outbound" as direction
+UNION
+WITH n
+MATCH (n)-[:IS_RELATED]->(rel:Relationship)<-[:IS_RELATED]-(peer)
+WHERE ($bidirectional_identifiers IS NULL OR rel.name in $bidirectional_identifiers)
+AND n.uuid <> peer.uuid
+WITH DISTINCT n, rel, peer
+CALL {
+WITH n, rel, peer
+MATCH (n)-[r:IS_RELATED]->(rel)
+WHERE (%(filters)s)
+WITH n, rel, peer, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH n, rel, peer, r AS r1
+WHERE r1.status = "active"
+MATCH (rel)<-[r:IS_RELATED]-(peer)
+WHERE (%(filters)s)
+WITH r1, r
+ORDER BY r.from DESC
+LIMIT 1
+WITH r1, r AS r2
+WHERE r2.status = "active"
+RETURN 1 AS is_active
+}
+RETURN n.uuid AS n_uuid, rel.name AS rel_name, peer.uuid AS peer_uuid, "bidirectional" as direction
+}
+RETURN DISTINCT n_uuid, rel_name, peer_uuid, direction
 """ % {"filters": rels_filter}
-
 self.add_to_query(query)
-
-self.return_labels = ["n", "rel", "peer", "r1", "r2", "direction"]
+self.return_labels = ["n_uuid", "rel_name", "peer_uuid", "direction"]

 def get_peers_group_by_node(self) -> GroupedPeerNodes:
 gpn = GroupedPeerNodes()
-for result in self.
-node_id = result.get("
-rel_name = result.get("
-peer_id = result.get("
+for result in self.get_results():
+node_id = result.get("n_uuid")
+rel_name = result.get("rel_name")
+peer_id = result.get("peer_uuid")
 direction = str(result.get("direction"))
 direction_enum = {
 "inbound": RelationshipDirection.INBOUND,
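Each branch of the rewritten Cypher query filters on `rel.name` only when the matching parameter is non-NULL (`$inbound_identifiers IS NULL OR rel.name in $inbound_identifiers`), so passing `None` preserves the previous fetch-everything behaviour while a list restricts the branch to the prefetched cardinality-one identifiers. The same convention expressed in plain Python:

```python
def branch_matches(rel_name: str, identifiers: list[str] | None) -> bool:
    # None means "no filter requested" (the prefetch_relationships path);
    # a list restricts the branch to the given relationship identifiers.
    return identifiers is None or rel_name in identifiers


assert branch_matches("anything", None)                      # unfiltered
assert branch_matches("device__site", ["device__site"])      # allow-listed (illustrative identifier)
assert not branch_matches("device__tags", ["device__site"])  # filtered out
```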
infrahub/core/schema/definitions/core/builtin.py
CHANGED
@@ -1,6 +1,4 @@
-from infrahub.core.constants import
-BranchSupportType,
-)
+from infrahub.core.constants import BranchSupportType

 from ...attribute_schema import AttributeSchema as Attr
 from ...node_schema import NodeSchema
@@ -8,7 +6,7 @@ from ...node_schema import NodeSchema
 builtin_tag = NodeSchema(
 name="Tag",
 namespace="Builtin",
-description="Standard Tag object to
+description="Standard Tag object to attach to other objects to provide some context.",
 include_in_menu=True,
 icon="mdi:tag-multiple",
 label="Tag",
infrahub/core/validators/aggregated_checker.py
CHANGED
@@ -97,9 +97,9 @@ class AggregatedConstraintChecker:
 error_detail_str += f"={data_path.value!r}"
 error_detail_str_list.append(error_detail_str)
 if data_path.peer_id:
-error_detail_str
+error_detail_str = f"{data_path.field_name}.id={data_path.peer_id}"
 error_detail_str_list.append(error_detail_str)
-if
+if error_detail_str_list:
 error_str += " The error relates to field "
 error_str += ",".join(error_detail_str_list)
 error_str += "."
infrahub/core/validators/uniqueness/query.py
CHANGED
@@ -30,7 +30,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
 def get_context(self) -> dict[str, str]:
 return {"kind": self.query_request.kind}

-async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: # noqa: ARG002
+async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None: # noqa: ARG002,PLR0915
 branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False)
 self.params.update(branch_params)
 from_times = db.render_list_comprehension(items="relationships(potential_path)", item_name="from")
@@ -39,7 +39,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
 )

 attribute_names = set()
-attr_paths, attr_paths_with_value = [], []
+attr_paths, attr_paths_with_value, attr_values = [], [], []
 for attr_path in self.query_request.unique_attribute_paths:
 try:
 property_rel_name = self.attribute_property_map[attr_path.property_name or "value"]
@@ -50,6 +50,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
 attribute_names.add(attr_path.attribute_name)
 if attr_path.value:
 attr_paths_with_value.append((attr_path.attribute_name, property_rel_name, attr_path.value))
+attr_values.append(attr_path.value)
 else:
 attr_paths.append((attr_path.attribute_name, property_rel_name))

@@ -57,6 +58,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
 relationship_attr_paths = []
 relationship_only_attr_paths = []
 relationship_only_attr_values = []
+relationship_attr_values = []
 relationship_attr_paths_with_value = []
 for rel_path in self.query_request.relationship_attribute_paths:
 relationship_names.add(rel_path.identifier)
@@ -64,6 +66,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
 relationship_attr_paths_with_value.append(
 (rel_path.identifier, rel_path.attribute_name, rel_path.value)
 )
+relationship_attr_values.append(rel_path.value)
 elif rel_path.attribute_name:
 relationship_attr_paths.append((rel_path.identifier, rel_path.attribute_name))
 else:
@@ -87,12 +90,14 @@ class NodeUniqueAttributeConstraintQuery(Query):
 "node_kind": self.query_request.kind,
 "attr_paths": attr_paths,
 "attr_paths_with_value": attr_paths_with_value,
+"attr_values": attr_values,
 "attribute_names": list(attribute_names),
 "relationship_names": list(relationship_names),
 "relationship_attr_paths": relationship_attr_paths,
 "relationship_attr_paths_with_value": relationship_attr_paths_with_value,
 "relationship_only_attr_paths": relationship_only_attr_paths,
 "relationship_only_attr_values": relationship_only_attr_values,
+"relationship_attr_values": relationship_attr_values,
 "min_count_required": self.min_count_required,
 }
 )
@@ -100,16 +105,28 @@ class NodeUniqueAttributeConstraintQuery(Query):
 attr_paths_subquery = """
 MATCH attr_path = (start_node:%(node_kind)s)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
 WHERE attr.name in $attribute_names
-AND
-OR [attr.name, type(r), attr_value.value] in $attr_paths_with_value)
+AND [attr.name, type(r)] in $attr_paths
 RETURN start_node, attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
 """ % {"node_kind": self.query_request.kind}

-
+attr_paths_with_value_subquery = """
+MATCH attr_path = (start_node:%(node_kind)s)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
+WHERE attr.name in $attribute_names AND attr_value.value in $attr_values
+AND [attr.name, type(r), attr_value.value] in $attr_paths_with_value
+RETURN start_node, attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
+""" % {"node_kind": self.query_request.kind}
+
+relationship_attr_paths_subquery = """
 MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
 WHERE relationship_node.name in $relationship_names
-AND
-
+AND [relationship_node.name, rel_attr.name] in $relationship_attr_paths
+RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
+""" % {"node_kind": self.query_request.kind}
+
+relationship_attr_paths_with_value_subquery = """
+MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
+WHERE relationship_node.name in $relationship_names AND rel_attr_value.value in $relationship_attr_values
+AND [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value
 RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
 """ % {"node_kind": self.query_request.kind}

@@ -125,9 +142,13 @@ class NodeUniqueAttributeConstraintQuery(Query):
 }

 select_subqueries = []
-if attr_paths
+if attr_paths:
 select_subqueries.append(attr_paths_subquery)
-if
+if attr_paths_with_value:
+select_subqueries.append(attr_paths_with_value_subquery)
+if relationship_attr_paths:
+select_subqueries.append(relationship_attr_paths_subquery)
+if relationship_attr_paths_with_value:
 select_subqueries.append(relationship_attr_paths_with_value_subquery)
 if relationship_only_attr_paths:
 select_subqueries.append(relationship_only_attr_paths_subquery)
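Splitting the combined subquery into value-less and with-value variants lets the with-value variants add a direct `attr_value.value IN $attr_values` (or `rel_attr_value.value IN $relationship_attr_values`) predicate, presumably so the new AttributeValue index can be used. A sketch of how the new parameter lists are derived (request data is illustrative):

```python
# Illustrative uniqueness-constraint request: (attribute_name, property, value).
unique_attribute_paths = [
    ("name", "HAS_VALUE", "atl1-edge1"),  # value known -> candidate for the indexed predicate
    ("asn", "HAS_VALUE", None),           # no value -> generic path comparison only
]

attr_paths: list[tuple[str, str]] = []
attr_paths_with_value: list[tuple[str, str, str]] = []
attr_values: list[str] = []

for attribute_name, property_rel_name, value in unique_attribute_paths:
    if value:
        attr_paths_with_value.append((attribute_name, property_rel_name, value))
        attr_values.append(value)  # feeds the `attr_value.value IN $attr_values` predicate
    else:
        attr_paths.append((attribute_name, property_rel_name))

assert attr_values == ["atl1-edge1"]
assert attr_paths == [("asn", "HAS_VALUE")]
```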
infrahub/database/__init__.py
CHANGED
@@ -26,16 +26,14 @@ from opentelemetry import trace
 from typing_extensions import Self

 from infrahub import config, lock
+from infrahub.constants.database import DatabaseType, Neo4jRuntime
 from infrahub.core import registry
 from infrahub.core.query import QueryType
 from infrahub.exceptions import DatabaseError
 from infrahub.log import get_logger
 from infrahub.utils import InfrahubStringEnum

-from .constants import DatabaseType, Neo4jRuntime
-from .memgraph import DatabaseManagerMemgraph
 from .metrics import CONNECTION_POOL_USAGE, QUERY_EXECUTION_METRICS, TRANSACTION_RETRIES
-from .neo4j import DatabaseManagerNeo4j

 if TYPE_CHECKING:
 from types import TracebackType
@@ -44,8 +42,6 @@ if TYPE_CHECKING:
 from infrahub.core.schema import MainSchemaTypes, NodeSchema
 from infrahub.core.schema.schema_branch import SchemaBranch

-from .manager import DatabaseManager
-
 validated_database = {}
 R = TypeVar("R")

@@ -134,7 +130,6 @@ class InfrahubDatabase:
 mode: InfrahubDatabaseMode = InfrahubDatabaseMode.DRIVER,
 db_type: DatabaseType | None = None,
 default_neo4j_runtime: Neo4jRuntime = Neo4jRuntime.DEFAULT,
-db_manager: DatabaseManager | None = None,
 schemas: list[SchemaBranch] | None = None,
 session: AsyncSession | None = None,
 session_mode: InfrahubDatabaseSessionMode = InfrahubDatabaseSessionMode.WRITE,
@@ -161,14 +156,6 @@
 else:
 self.db_type = config.SETTINGS.database.db_type

-if db_manager:
-self.manager = db_manager
-self.manager.db = self
-elif self.db_type == DatabaseType.NEO4J:
-self.manager = DatabaseManagerNeo4j(db=self)
-elif self.db_type == DatabaseType.MEMGRAPH:
-self.manager = DatabaseManagerMemgraph(db=self)
-
 def __del__(self) -> None:
 if not self._session or not self._is_session_local or self._session.closed():
 return
@@ -228,7 +215,6 @@
 db_type=self.db_type,
 default_neo4j_runtime=self.default_neo4j_runtime,
 schemas=schemas or self._schemas.values(),
-db_manager=self.manager,
 driver=self._driver,
 session_mode=session_mode,
 queries_names_to_config=self.queries_names_to_config,
@@ -243,7 +229,6 @@
 db_type=self.db_type,
 default_neo4j_runtime=self.default_neo4j_runtime,
 schemas=schemas or self._schemas.values(),
-db_manager=self.manager,
 driver=self._driver,
 session=self._session,
 session_mode=self._session_mode,