infrahub-server 1.6.3__py3-none-any.whl → 1.7.0b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +4 -2
- infrahub/api/schema.py +3 -1
- infrahub/artifacts/tasks.py +1 -0
- infrahub/auth.py +2 -2
- infrahub/cli/db.py +6 -6
- infrahub/computed_attribute/gather.py +3 -4
- infrahub/computed_attribute/tasks.py +23 -6
- infrahub/config.py +8 -0
- infrahub/constants/enums.py +12 -0
- infrahub/core/account.py +5 -8
- infrahub/core/attribute.py +106 -108
- infrahub/core/branch/models.py +44 -71
- infrahub/core/branch/tasks.py +5 -3
- infrahub/core/changelog/diff.py +1 -20
- infrahub/core/changelog/models.py +0 -7
- infrahub/core/constants/__init__.py +17 -0
- infrahub/core/constants/database.py +0 -1
- infrahub/core/constants/schema.py +0 -1
- infrahub/core/convert_object_type/repository_conversion.py +3 -4
- infrahub/core/diff/data_check_synchronizer.py +3 -2
- infrahub/core/diff/enricher/cardinality_one.py +1 -1
- infrahub/core/diff/merger/merger.py +27 -1
- infrahub/core/diff/merger/serializer.py +3 -10
- infrahub/core/diff/model/diff.py +1 -1
- infrahub/core/diff/query/merge.py +376 -135
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/graph/constraints.py +2 -2
- infrahub/core/graph/schema.py +2 -12
- infrahub/core/manager.py +132 -126
- infrahub/core/metadata/__init__.py +0 -0
- infrahub/core/metadata/interface.py +37 -0
- infrahub/core/metadata/model.py +31 -0
- infrahub/core/metadata/query/__init__.py +0 -0
- infrahub/core/metadata/query/node_metadata.py +301 -0
- infrahub/core/migrations/graph/__init__.py +4 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +3 -8
- infrahub/core/migrations/graph/m017_add_core_profile.py +5 -2
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +2 -1
- infrahub/core/migrations/graph/m019_restore_rels_to_time.py +0 -10
- infrahub/core/migrations/graph/m020_duplicate_edges.py +0 -8
- infrahub/core/migrations/graph/m025_uniqueness_nulls.py +2 -1
- infrahub/core/migrations/graph/m026_0000_prefix_fix.py +2 -1
- infrahub/core/migrations/graph/m029_duplicates_cleanup.py +0 -1
- infrahub/core/migrations/graph/m031_check_number_attributes.py +2 -2
- infrahub/core/migrations/graph/m038_redo_0000_prefix_fix.py +2 -1
- infrahub/core/migrations/graph/m049_remove_is_visible_relationship.py +38 -0
- infrahub/core/migrations/graph/m050_backfill_vertex_metadata.py +168 -0
- infrahub/core/migrations/query/attribute_add.py +17 -6
- infrahub/core/migrations/query/attribute_remove.py +19 -5
- infrahub/core/migrations/query/attribute_rename.py +21 -5
- infrahub/core/migrations/query/node_duplicate.py +19 -4
- infrahub/core/migrations/schema/attribute_kind_update.py +25 -7
- infrahub/core/migrations/schema/attribute_supports_profile.py +3 -1
- infrahub/core/migrations/schema/models.py +3 -0
- infrahub/core/migrations/schema/node_attribute_add.py +4 -1
- infrahub/core/migrations/schema/node_remove.py +24 -2
- infrahub/core/migrations/schema/tasks.py +4 -1
- infrahub/core/migrations/shared.py +13 -6
- infrahub/core/models.py +6 -6
- infrahub/core/node/__init__.py +156 -57
- infrahub/core/node/create.py +7 -3
- infrahub/core/node/standard.py +100 -14
- infrahub/core/property.py +0 -1
- infrahub/core/protocols_base.py +6 -2
- infrahub/core/query/__init__.py +6 -7
- infrahub/core/query/attribute.py +161 -46
- infrahub/core/query/branch.py +57 -69
- infrahub/core/query/diff.py +4 -4
- infrahub/core/query/node.py +618 -180
- infrahub/core/query/relationship.py +449 -300
- infrahub/core/query/standard_node.py +25 -5
- infrahub/core/query/utils.py +2 -4
- infrahub/core/relationship/constraints/profiles_removal.py +168 -0
- infrahub/core/relationship/model.py +293 -139
- infrahub/core/schema/attribute_parameters.py +1 -28
- infrahub/core/schema/attribute_schema.py +17 -11
- infrahub/core/schema/manager.py +63 -43
- infrahub/core/schema/relationship_schema.py +6 -2
- infrahub/core/schema/schema_branch.py +48 -76
- infrahub/core/task/task.py +4 -2
- infrahub/core/utils.py +0 -22
- infrahub/core/validators/attribute/kind.py +2 -5
- infrahub/core/validators/determiner.py +3 -3
- infrahub/database/__init__.py +3 -3
- infrahub/dependencies/builder/constraint/grouped/node_runner.py +2 -0
- infrahub/dependencies/builder/constraint/relationship_manager/profiles_removal.py +8 -0
- infrahub/dependencies/registry.py +2 -0
- infrahub/display_labels/tasks.py +12 -3
- infrahub/git/integrator.py +18 -18
- infrahub/git/tasks.py +1 -1
- infrahub/graphql/app.py +2 -2
- infrahub/graphql/constants.py +3 -0
- infrahub/graphql/context.py +1 -1
- infrahub/graphql/initialization.py +11 -0
- infrahub/graphql/loaders/account.py +134 -0
- infrahub/graphql/loaders/node.py +5 -12
- infrahub/graphql/loaders/peers.py +5 -7
- infrahub/graphql/manager.py +158 -18
- infrahub/graphql/metadata.py +91 -0
- infrahub/graphql/models.py +33 -3
- infrahub/graphql/mutations/account.py +5 -5
- infrahub/graphql/mutations/attribute.py +0 -2
- infrahub/graphql/mutations/branch.py +9 -5
- infrahub/graphql/mutations/computed_attribute.py +1 -1
- infrahub/graphql/mutations/display_label.py +1 -1
- infrahub/graphql/mutations/hfid.py +1 -1
- infrahub/graphql/mutations/ipam.py +4 -6
- infrahub/graphql/mutations/main.py +9 -4
- infrahub/graphql/mutations/profile.py +16 -22
- infrahub/graphql/mutations/proposed_change.py +4 -4
- infrahub/graphql/mutations/relationship.py +40 -10
- infrahub/graphql/mutations/repository.py +14 -12
- infrahub/graphql/mutations/schema.py +2 -2
- infrahub/graphql/queries/branch.py +62 -6
- infrahub/graphql/queries/diff/tree.py +5 -5
- infrahub/graphql/resolvers/account_metadata.py +84 -0
- infrahub/graphql/resolvers/ipam.py +6 -8
- infrahub/graphql/resolvers/many_relationship.py +77 -35
- infrahub/graphql/resolvers/resolver.py +16 -12
- infrahub/graphql/resolvers/single_relationship.py +87 -23
- infrahub/graphql/subscription/graphql_query.py +2 -0
- infrahub/graphql/types/__init__.py +0 -1
- infrahub/graphql/types/attribute.py +10 -5
- infrahub/graphql/types/branch.py +40 -53
- infrahub/graphql/types/enums.py +3 -0
- infrahub/graphql/types/metadata.py +28 -0
- infrahub/graphql/types/node.py +22 -2
- infrahub/graphql/types/relationship.py +10 -2
- infrahub/graphql/types/standard_node.py +4 -3
- infrahub/hfid/tasks.py +12 -3
- infrahub/profiles/gather.py +56 -0
- infrahub/profiles/mandatory_fields_checker.py +116 -0
- infrahub/profiles/models.py +66 -0
- infrahub/profiles/node_applier.py +153 -12
- infrahub/profiles/queries/get_profile_data.py +143 -31
- infrahub/profiles/tasks.py +79 -27
- infrahub/profiles/triggers.py +22 -0
- infrahub/proposed_change/tasks.py +4 -1
- infrahub/tasks/artifact.py +1 -0
- infrahub/transformations/tasks.py +2 -2
- infrahub/trigger/catalogue.py +2 -0
- infrahub/trigger/models.py +1 -0
- infrahub/trigger/setup.py +3 -3
- infrahub/trigger/tasks.py +3 -0
- infrahub/validators/tasks.py +1 -0
- infrahub/webhook/models.py +1 -1
- infrahub/webhook/tasks.py +1 -1
- infrahub/workers/dependencies.py +9 -3
- infrahub/workers/infrahub_async.py +13 -4
- infrahub/workflows/catalogue.py +19 -0
- infrahub_sdk/node/constants.py +1 -0
- infrahub_sdk/node/related_node.py +13 -4
- infrahub_sdk/node/relationship.py +8 -0
- {infrahub_server-1.6.3.dist-info → infrahub_server-1.7.0b0.dist-info}/METADATA +17 -16
- {infrahub_server-1.6.3.dist-info → infrahub_server-1.7.0b0.dist-info}/RECORD +161 -143
- infrahub_testcontainers/container.py +3 -3
- infrahub_testcontainers/docker-compose-cluster.test.yml +7 -7
- infrahub_testcontainers/docker-compose.test.yml +13 -5
- {infrahub_server-1.6.3.dist-info → infrahub_server-1.7.0b0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.6.3.dist-info → infrahub_server-1.7.0b0.dist-info}/entry_points.txt +0 -0
- {infrahub_server-1.6.3.dist-info → infrahub_server-1.7.0b0.dist-info}/licenses/LICENSE.txt +0 -0
infrahub/core/metadata/__init__.py (new empty package file, no content to display)
infrahub/core/metadata/interface.py (new file, +37 lines):

```python
from abc import abstractmethod

from infrahub.core.timestamp import Timestamp


class MetadataInterface:
    @abstractmethod
    def _set_created_at(self, value: Timestamp | None) -> None:
        raise NotImplementedError()

    @abstractmethod
    def _set_created_by(self, value: str | None) -> None:
        raise NotImplementedError()

    @abstractmethod
    def _set_updated_at(self, value: Timestamp | None) -> None:
        raise NotImplementedError()

    @abstractmethod
    def _set_updated_by(self, value: str | None) -> None:
        raise NotImplementedError()

    @abstractmethod
    def _get_created_at(self) -> Timestamp | None:
        raise NotImplementedError()

    @abstractmethod
    def _get_created_by(self) -> str | None:
        raise NotImplementedError()

    @abstractmethod
    def _get_updated_at(self) -> Timestamp | None:
        raise NotImplementedError()

    @abstractmethod
    def _get_updated_by(self) -> str | None:
        raise NotImplementedError()
```
infrahub/core/metadata/model.py (new file, +31 lines):

```python
from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING

from infrahub.core.constants import MetadataOptions

if TYPE_CHECKING:
    from infrahub.core.timestamp import Timestamp


@dataclass(frozen=True)
class MetadataQueryOptions:
    node_level: MetadataOptions = MetadataOptions.NONE
    attribute_level: MetadataOptions = MetadataOptions.NONE
    relationship_level: MetadataOptions = MetadataOptions.NONE

    def __or__(self, other: MetadataQueryOptions) -> MetadataQueryOptions:
        return MetadataQueryOptions(
            node_level=self.node_level | other.node_level,
            attribute_level=self.attribute_level | other.attribute_level,
            relationship_level=self.relationship_level | other.relationship_level,
        )


@dataclass
class MetadataInfo:
    created_at: Timestamp | None = None
    created_by: str | None = None
    updated_at: Timestamp | None = None
    updated_by: str | None = None
```
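Because `MetadataQueryOptions` is frozen, `|` builds a new instance by combining each level with its counterpart, which presumes `MetadataOptions` behaves like a flag-style enum (its definition lands in `infrahub/core/constants`, which this diff only lists). A hedged sketch of the merge mechanics, using only the `NONE` member visible here:

```python
from infrahub.core.constants import MetadataOptions
from infrahub.core.metadata.model import MetadataQueryOptions

# Two callers declare what they need independently; NONE is the only member
# shown in this diff, so richer flag values are left out of the sketch.
opts_a = MetadataQueryOptions(node_level=MetadataOptions.NONE)
opts_b = MetadataQueryOptions(attribute_level=MetadataOptions.NONE)

# "|" returns a brand-new frozen instance; neither operand is mutated and
# each level is OR-ed with the matching level of the other operand.
merged = opts_a | opts_b
print(merged.node_level, merged.attribute_level, merged.relationship_level)
```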
infrahub/core/metadata/query/__init__.py (new empty package file, no content to display)
infrahub/core/metadata/query/node_metadata.py (new file, +301 lines):

```python
from __future__ import annotations

from dataclasses import dataclass, field
from enum import Enum
from typing import TYPE_CHECKING, Any

from infrahub.core.query import Query, QueryType

if TYPE_CHECKING:
    from infrahub.core.timestamp import Timestamp
    from infrahub.database import InfrahubDatabase


class RelationshipDirection(Enum):
    OUTBOUND = "outbound"
    INBOUND = "inbound"
    BIDIRECTIONAL = "bidirectional"


@dataclass
class AttributeMetadata:
    uuid: str
    name: str
    is_deleted: bool
    created_at: Timestamp | None = None
    created_by: str | None = None
    updated_at: Timestamp | None = None
    updated_by: str | None = None

    def __hash__(self) -> int:
        return hash(
            (
                self.uuid,
                self.name,
                self.is_deleted,
                self.created_at,
                self.created_by,
                self.updated_at,
                self.updated_by,
            )
        )


@dataclass
class RelationshipMetadata:
    uuid: str
    identifier: str  # Relationship.name in DB (e.g., "testcar__testperson")
    peer_uuid: str
    direction: RelationshipDirection
    is_deleted: bool
    created_at: Timestamp | None = None
    created_by: str | None = None
    updated_at: Timestamp | None = None
    updated_by: str | None = None

    def __hash__(self) -> int:
        return hash(
            (
                self.uuid,
                self.identifier,
                self.peer_uuid,
                self.direction,
                self.is_deleted,
                self.created_at,
                self.created_by,
                self.updated_at,
                self.updated_by,
            )
        )


@dataclass
class NodeMetadata:
    uuid: str
    kind: str
    is_deleted: bool
    created_at: Timestamp | None = None
    created_by: str | None = None
    updated_at: Timestamp | None = None
    updated_by: str | None = None
    attributes: list[AttributeMetadata] = field(default_factory=list)
    relationships: list[RelationshipMetadata] = field(default_factory=list)

    def __hash__(self) -> int:
        return hash(
            (
                self.uuid,
                self.kind,
                self.is_deleted,
                self.created_at,
                self.created_by,
                self.updated_at,
                self.updated_by,
                tuple(self.attributes),
                tuple(self.relationships),
            )
        )


class NodeMetadataDefaultBranchQuery(Query):
    """Query to retrieve metadata for nodes and their attributes/relationships.

    This query only works on the default branch and reads metadata directly from
    vertex properties. It supports retrieving deleted nodes, attributes, and
    relationships.
    """

    name = "node_metadata"
    type = QueryType.READ
    insert_return = False

    def __init__(self, node_uuids: list[str], **kwargs: Any) -> None:
        self.node_uuids = node_uuids
        super().__init__(**kwargs)

    async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:  # noqa: ARG002
        if not self.branch.is_default:
            raise ValueError("NodeMetadataQuery only runs on the default branch")

        self.params["node_uuids"] = self.node_uuids
        self.params["branch"] = self.branch.name

        # Query nodes with their metadata and deletion status
        # Then query attributes and relationships
        query = """
        // ------------------
        // Part 1: Get node metadata with deletion status
        // ------------------
        UNWIND $node_uuids AS node_uuid
        CALL (node_uuid) {
            MATCH (n:Node {uuid: node_uuid})-[r_ipo:IS_PART_OF]->(root:Root)
            WHERE r_ipo.branch = $branch
            RETURN n, r_ipo
            ORDER BY r_ipo.from DESC
            LIMIT 1
        }
        WITH n,
            CASE
                WHEN r_ipo.status = "deleted" THEN true
                WHEN r_ipo.to IS NOT NULL THEN true
                ELSE false
            END AS node_is_deleted

        // ------------------
        // Part 2: Get attribute details
        // ------------------
        OPTIONAL MATCH (n)-[:HAS_ATTRIBUTE {branch: $branch}]->(attr:Attribute)
        WITH DISTINCT n, node_is_deleted, attr
        CALL (n, attr) {
            OPTIONAL MATCH (n)-[r_attr:HAS_ATTRIBUTE]->(attr)
            WHERE r_attr.branch = $branch
            WITH r_attr, attr
            ORDER BY r_attr.from DESC
            LIMIT 1
            RETURN {
                uuid: attr.uuid,
                name: attr.name,
                is_deleted: CASE
                    WHEN r_attr.status = "deleted" THEN true
                    WHEN r_attr.to IS NOT NULL THEN true
                    ELSE false
                END,
                created_at: attr.created_at,
                created_by: attr.created_by,
                updated_at: attr.updated_at,
                updated_by: attr.updated_by
            } AS attribute_details
        }
        WITH n, node_is_deleted, COALESCE(collect(attribute_details), []) AS attributes

        // ------------------
        // Part 3: Get relationship details
        // ------------------
        OPTIONAL MATCH (n)-[:IS_RELATED {branch: $branch}]-(rel:Relationship)-[:IS_RELATED {branch: $branch}]-(peer:Node)
        WHERE n <> peer
        WITH DISTINCT n, node_is_deleted, attributes, rel, peer
        CALL (n, rel, peer) {
            OPTIONAL MATCH (n)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(peer:Node)
            WHERE r1.branch = $branch AND r2.branch = $branch
            WITH r1, r2
            ORDER BY r1.from DESC, r2.from DESC
            LIMIT 1
            RETURN {
                uuid: rel.uuid,
                identifier: rel.name,
                peer_uuid: peer.uuid,
                direction: CASE
                    WHEN startNode(r1) = n AND startNode(r2) = rel THEN "outbound"
                    WHEN startNode(r1) = rel AND startNode(r2) = peer THEN "inbound"
                    ELSE "bidirectional"
                END,
                is_deleted: CASE
                    WHEN (r1.status = "deleted" OR r1.to IS NOT NULL) AND (r2.status = "deleted" OR r2.to IS NOT NULL) THEN true
                    ELSE false
                END,
                created_at: rel.created_at,
                created_by: rel.created_by,
                updated_at: rel.updated_at,
                updated_by: rel.updated_by
            } AS relationship_details
        }
        WITH n, node_is_deleted, attributes, COALESCE(collect(relationship_details), []) AS relationships

        RETURN n.uuid AS node_uuid, n.kind AS node_kind, node_is_deleted,
            n.created_at AS node_created_at, n.created_by AS node_created_by,
            n.updated_at AS node_updated_at, n.updated_by AS node_updated_by,
            attributes, relationships
        """
        self.return_labels = [
            "node_uuid",
            "node_kind",
            "node_is_deleted",
            "node_created_at",
            "node_created_by",
            "node_updated_at",
            "node_updated_by",
            "attributes",
            "relationships",
        ]
        self.add_to_query(query)

    def get_metadatas(self) -> list[NodeMetadata]:
        """Process query results into NodeMetadata dataclasses."""
        from infrahub.core.timestamp import Timestamp

        nodes: list[NodeMetadata] = []

        for result in self.get_results():
            node_uuid = result.get_as_type("node_uuid", return_type=str)
            node_kind = result.get_as_type("node_kind", return_type=str)
            node_is_deleted = result.get_as_type("node_is_deleted", bool)

            node_created_at_str = result.get_as_str("node_created_at")
            node_created_at = Timestamp(node_created_at_str) if node_created_at_str else None
            node_created_by = result.get_as_str("node_created_by")
            node_updated_at_str = result.get_as_str("node_updated_at")
            node_updated_at = Timestamp(node_updated_at_str) if node_updated_at_str else None
            node_updated_by = result.get_as_str("node_updated_by")

            # Parse attributes
            attributes_data: list[dict[str, Any]] = result.get_as_type("attributes", list)
            attributes: list[AttributeMetadata] = []
            for attr_data in attributes_data:
                attr_created_at_str = attr_data.get("created_at")
                attr_created_at = Timestamp(attr_created_at_str) if attr_created_at_str else None
                attr_updated_at_str = attr_data.get("updated_at")
                attr_updated_at = Timestamp(attr_updated_at_str) if attr_updated_at_str else None

                attributes.append(
                    AttributeMetadata(
                        uuid=attr_data["uuid"],
                        name=attr_data["name"],
                        is_deleted=attr_data["is_deleted"],
                        created_at=attr_created_at,
                        created_by=attr_data.get("created_by"),
                        updated_at=attr_updated_at,
                        updated_by=attr_data.get("updated_by"),
                    )
                )

            # Parse relationships
            relationships_data: list[dict[str, Any]] = result.get_as_type("relationships", list)
            relationships: list[RelationshipMetadata] = []
            for rel_data in relationships_data:
                rel_created_at_str = rel_data.get("created_at")
                rel_created_at = Timestamp(rel_created_at_str) if rel_created_at_str else None
                rel_updated_at_str = rel_data.get("updated_at")
                rel_updated_at = Timestamp(rel_updated_at_str) if rel_updated_at_str else None

                direction_str = rel_data["direction"]
                direction = RelationshipDirection(direction_str)

                relationships.append(
                    RelationshipMetadata(
                        uuid=rel_data["uuid"],
                        identifier=rel_data["identifier"],
                        peer_uuid=rel_data["peer_uuid"],
                        direction=direction,
                        is_deleted=rel_data["is_deleted"],
                        created_at=rel_created_at,
                        created_by=rel_data.get("created_by"),
                        updated_at=rel_updated_at,
                        updated_by=rel_data.get("updated_by"),
                    )
                )

            nodes.append(
                NodeMetadata(
                    uuid=node_uuid,
                    kind=node_kind,
                    is_deleted=node_is_deleted,
                    created_at=node_created_at,
                    created_by=node_created_by,
                    updated_at=node_updated_at,
                    updated_by=node_updated_by,
                    attributes=attributes,
                    relationships=relationships,
                )
            )

        return nodes
```
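Assuming the query follows the `Query.init(...)` / `execute(...)` flow used by other query classes in the code base, fetching metadata for a batch of nodes on the default branch would look roughly like the sketch below (`db` and `default_branch` are placeholders for the usual `InfrahubDatabase` and `Branch` objects, not values taken from this diff):

```python
from infrahub.core.metadata.query.node_metadata import NodeMetadataDefaultBranchQuery


async def fetch_node_metadata(db, default_branch, node_uuids: list[str]):
    # query_init() raises ValueError if the branch is not the default one.
    query = await NodeMetadataDefaultBranchQuery.init(
        db=db, branch=default_branch, node_uuids=node_uuids
    )
    await query.execute(db=db)
    # One NodeMetadata per node, with nested attribute/relationship metadata.
    return query.get_metadatas()
```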
infrahub/core/migrations/graph/__init__.py:

```diff
@@ -50,6 +50,8 @@ from .m045_backfill_hfid_display_label_in_db_profile_template import Migration04
 from .m046_fill_agnostic_hfid_display_labels import Migration046
 from .m047_backfill_or_null_display_label import Migration047
 from .m048_undelete_rel_props import Migration048
+from .m049_remove_is_visible_relationship import Migration049
+from .m050_backfill_vertex_metadata import Migration050
 
 if TYPE_CHECKING:
     from ..shared import MigrationTypes
@@ -104,6 +106,8 @@ MIGRATIONS: list[type[MigrationTypes]] = [
     Migration046,
     Migration047,
     Migration048,
+    Migration049,
+    Migration050,
 ]
 
 
```
infrahub/core/migrations/graph/m013_convert_git_password_credential.py:

```diff
@@ -73,7 +73,6 @@ class Migration013ConvertCoreRepositoryWithCred(Query)
 
         self.params["current_time"] = self.at.to_string()
         self.params["is_protected_default"] = False
-        self.params["is_visible_default"] = True
         self.params["branch_support"] = BranchSupportType.AGNOSTIC.value
 
         self.params["rel_identifier"] = "gitrepository__credential"
@@ -91,8 +90,7 @@ class Migration013ConvertCoreRepositoryWithCred(Query)
         // Prepare some nodes we'll need later
         // --------------------------------
         MERGE (is_protected_value:Boolean { value: $is_protected_default })
-
-        WITH git_repo, root, is_protected_value, is_visible_value
+        WITH git_repo, root, is_protected_value
         // --------------------------------
         // Retrieve the name of the current repository
         // --------------------------------
@@ -106,9 +104,9 @@ class Migration013ConvertCoreRepositoryWithCred(Query)
         ORDER BY r1.branch_level DESC, r1.from DESC
         LIMIT 1
         }
-        WITH n1 as git_repo, r11 as r1, r22 as r2, av1 as git_name_value, root, is_protected_value
+        WITH n1 as git_repo, r11 as r1, r22 as r2, av1 as git_name_value, root, is_protected_value
         WHERE r1.status = "active" AND r2.status = "active"
-        WITH DISTINCT(git_repo) as git_repo, root, is_protected_value,
+        WITH DISTINCT(git_repo) as git_repo, root, is_protected_value, git_name_value
         // --------------------------------
         // Create new CorePasswordCredential node
         // --------------------------------
@@ -119,20 +117,17 @@ class Migration013ConvertCoreRepositoryWithCred(Query)
         CREATE (attr_name)<-[:HAS_ATTRIBUTE $rel_props_new ]-(cred)
         CREATE (attr_name)-[:HAS_VALUE $rel_props_new ]->(git_name_value)
         CREATE (attr_name)-[:IS_PROTECTED $rel_props_new]->(is_protected_value)
-        CREATE (attr_name)-[:IS_VISIBLE $rel_props_new]->(is_visible_value)
         // attribute: label
         CREATE (attr_lbl:Attribute { name: "label", branch_support: $branch_support })
         CREATE (attr_lbl)<-[:HAS_ATTRIBUTE $rel_props_new ]-(cred)
         CREATE (attr_lbl)-[:HAS_VALUE $rel_props_new ]->(git_name_value)
         CREATE (attr_lbl)-[:IS_PROTECTED $rel_props_new]->(is_protected_value)
-        CREATE (attr_lbl)-[:IS_VISIBLE $rel_props_new]->(is_visible_value)
         // attribute: description
         CREATE (attr_desc:Attribute { name: "description", branch_support: $branch_support })
         MERGE (av_desc:AttributeValue { value: "Credential for " + git_name_value.value, is_default: true })
         CREATE (attr_desc)<-[:HAS_ATTRIBUTE $rel_props_new ]-(cred)
         CREATE (attr_desc)-[:HAS_VALUE $rel_props_new ]->(av_desc)
         CREATE (attr_desc)-[:IS_PROTECTED $rel_props_new]->(is_protected_value)
-        CREATE (attr_desc)-[:IS_VISIBLE $rel_props_new]->(is_visible_value)
         %(attr_name_guid)s
         %(attr_label_guid)s
         %(attr_desc_guid)s
```
infrahub/core/migrations/graph/m017_add_core_profile.py:

```diff
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Sequence
 
 from infrahub.core import registry
+from infrahub.core.constants import SYSTEM_USER_ID
 from infrahub.core.migrations.shared import MigrationResult
 from infrahub.core.schema.definitions.core import core_profile_schema_definition
 from infrahub.core.schema.manager import SchemaManager
@@ -26,7 +27,7 @@ class Migration017(InternalSchemaMigration):
 
         return result
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:
         """
         Load CoreProfile schema node in db.
         """
@@ -35,6 +36,8 @@ class Migration017(InternalSchemaMigration):
         manager.set_schema_branch(name=default_branch.name, schema=self.get_internal_schema())
 
         db.add_schema(manager.get_schema_branch(default_branch.name))
-        await manager.load_node_to_db(
+        await manager.load_node_to_db(
+            node=core_profile_schema_definition, db=db, branch=default_branch, user_id=user_id
+        )
 
         return MigrationResult()
```
infrahub/core/migrations/graph/m018_uniqueness_nulls.py:

```diff
@@ -4,6 +4,7 @@ from collections import defaultdict
 from typing import TYPE_CHECKING, Sequence
 
 from infrahub.core import registry
+from infrahub.core.constants import SYSTEM_USER_ID
 from infrahub.core.diff.payload_builder import get_display_labels_per_kind
 from infrahub.core.migrations.shared import MigrationResult
 from infrahub.core.schema import GenericSchema, NodeSchema, SchemaRoot, internal_schema
@@ -95,5 +96,5 @@ class Migration018(InternalSchemaMigration):
     async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
         return MigrationResult()
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:  # noqa: ARG002
         return await validate_nulls_in_uniqueness_constraints(db=db)
```
infrahub/core/migrations/graph/m019_restore_rels_to_time.py:

```diff
@@ -169,11 +169,6 @@ class DeleteNodesRelsQuery(Query)
     MERGE (rel)-[:IS_PROTECTED {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]->(peer_2)
 }
 
-CALL (rel, peer_2, branch, branch_level, deleted_time) {
-    MATCH (rel)-[:IS_VISIBLE]->(peer_2)
-    MERGE (rel)-[:IS_VISIBLE {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]->(peer_2)
-}
-
 CALL (rel, peer_2, branch, branch_level, deleted_time) {
     MATCH (rel)-[:HAS_OWNER]->(peer_2)
     MERGE (rel)-[:HAS_OWNER {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]->(peer_2)
@@ -194,11 +189,6 @@
     MERGE (rel)<-[:IS_PROTECTED {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]-(peer_2)
 }
 
-CALL (rel, peer_2, branch, branch_level, deleted_time) {
-    MATCH (rel)<-[:IS_VISIBLE]-(peer_2)
-    MERGE (rel)<-[:IS_VISIBLE {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]-(peer_2)
-}
-
 CALL (rel, peer_2, branch, branch_level, deleted_time) {
     MATCH (rel)<-[:HAS_OWNER]-(peer_2)
     MERGE (rel)<-[:HAS_OWNER {status: "deleted", branch: branch, branch_level: branch_level, from: deleted_time}]-(peer_2)
```
infrahub/core/migrations/graph/m020_duplicate_edges.py:

```diff
@@ -108,13 +108,6 @@ CALL (a, branch, status, from, to, b, e_id_to_keep) {
         self.add_to_query(query)
 
 
-class DeleteDuplicateIsVisibleEdgesQuery(DeleteDuplicateBooleanEdgesQuery):
-    name = "delete_duplicate_is_visible_edges"
-    type = QueryType.WRITE
-    insert_return = False
-    edge_type = DatabaseEdgeType.IS_VISIBLE
-
-
 class DeleteDuplicateIsProtectedEdgesQuery(DeleteDuplicateBooleanEdgesQuery):
     name = "delete_duplicate_is_protected_edges"
     type = QueryType.WRITE
@@ -144,7 +137,6 @@ class Migration020(GraphMigration):
     minimum_version: int = 19
     queries: Sequence[type[Query]] = [
         DeleteDuplicateHasValueEdgesQuery,
-        DeleteDuplicateIsVisibleEdgesQuery,
         DeleteDuplicateIsProtectedEdgesQuery,
     ]
 
```
infrahub/core/migrations/graph/m025_uniqueness_nulls.py:

```diff
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Sequence
 
+from infrahub.core.constants import SYSTEM_USER_ID
 from infrahub.core.migrations.shared import MigrationResult
 from infrahub.log import get_logger
 
@@ -22,5 +23,5 @@ class Migration025(InternalSchemaMigration):
     async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
         return MigrationResult()
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:  # noqa: ARG002
         return await validate_nulls_in_uniqueness_constraints(db=db)
```
infrahub/core/migrations/graph/m026_0000_prefix_fix.py:

```diff
@@ -4,6 +4,7 @@ import ipaddress
 from typing import TYPE_CHECKING, Sequence
 
 from infrahub.core.branch.models import Branch
+from infrahub.core.constants import SYSTEM_USER_ID
 from infrahub.core.initialization import initialization
 from infrahub.core.ipam.reconciler import IpamReconciler
 from infrahub.core.manager import NodeManager
@@ -28,7 +29,7 @@ class Migration026(InternalSchemaMigration):
     async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
         return MigrationResult()
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:  # noqa: ARG002
         # load schemas from database into registry
         initialize_lock()
         await initialization(db=db)
```
infrahub/core/migrations/graph/m031_check_number_attributes.py:

```diff
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING, Sequence
 from infrahub import config
 from infrahub.core import registry
 from infrahub.core.branch import Branch
-from infrahub.core.constants import SchemaPathType
+from infrahub.core.constants import SYSTEM_USER_ID, SchemaPathType
 from infrahub.core.initialization import initialization
 from infrahub.core.migrations.shared import InternalSchemaMigration, MigrationResult, SchemaMigration
 from infrahub.core.path import SchemaPath
@@ -35,7 +35,7 @@ class Migration031(InternalSchemaMigration):
     minimum_version: int = 30
     migrations: Sequence[SchemaMigration] = []
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:  # noqa: ARG002
         """Retrieve all number attributes that have a min/max/excluded_values
 
         For any of these attributes, check if corresponding existing nodes are valid."""
```
infrahub/core/migrations/graph/m038_redo_0000_prefix_fix.py:

```diff
@@ -4,6 +4,7 @@ import ipaddress
 from typing import TYPE_CHECKING, Sequence
 
 from infrahub.core.branch.models import Branch
+from infrahub.core.constants import SYSTEM_USER_ID
 from infrahub.core.initialization import initialization
 from infrahub.core.ipam.reconciler import IpamReconciler
 from infrahub.core.manager import NodeManager
@@ -37,7 +38,7 @@ class Migration038(InternalSchemaMigration):
     async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
         return MigrationResult()
 
-    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+    async def execute(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> MigrationResult:  # noqa: ARG002
         # load schemas from database into registry
         initialize_lock()
         await initialization(db=db)
```
infrahub/core/migrations/graph/m049_remove_is_visible_relationship.py (new file, +38 lines):

```python
from __future__ import annotations

from typing import TYPE_CHECKING, Any, Sequence

from infrahub.core.migrations.shared import MigrationResult
from infrahub.core.query import Query, QueryType

from ..shared import GraphMigration

if TYPE_CHECKING:
    from infrahub.database import InfrahubDatabase


class RemoveIsVisibleRelationshipQuery(Query):
    name = "remove_is_visible_relationship"
    type: QueryType = QueryType.WRITE
    insert_return = False

    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
        query = """
        MATCH ()-[rel:IS_VISIBLE]->()
        CALL (rel) {
            DELETE rel
        } IN TRANSACTIONS
        """
        self.add_to_query(query)


class Migration049(GraphMigration):
    name: str = "049_remove_is_visible_relationship"
    minimum_version: int = 48
    queries: Sequence[type[Query]] = [RemoveIsVisibleRelationshipQuery]

    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
        return MigrationResult()

    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
        return await self.do_execute(db=db)
```