infrahub-server 1.4.10__py3-none-any.whl → 1.5.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +3 -0
- infrahub/auth.py +5 -5
- infrahub/cli/db.py +26 -2
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +25 -22
- infrahub/core/branch/models.py +2 -2
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +4 -3
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +2 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +9 -84
- infrahub/core/migrations/graph/__init__.py +6 -0
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
- infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
- infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
- infrahub/core/migrations/schema/node_attribute_add.py +5 -2
- infrahub/core/migrations/shared.py +5 -6
- infrahub/core/node/__init__.py +165 -42
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +67 -35
- infrahub/core/node/lock_utils.py +98 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +27 -15
- infrahub/core/query/node.py +61 -185
- infrahub/core/query/relationship.py +43 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +60 -20
- infrahub/core/schema/attribute_schema.py +0 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/internal.py +14 -1
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +137 -2
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +186 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +38 -1
- infrahub/git/integrator.py +22 -14
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +2 -2
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +212 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +11 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +34 -13
- infrahub/graphql/mutations/display_label.py +111 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +118 -0
- infrahub/graphql/mutations/ipam.py +21 -8
- infrahub/graphql/mutations/main.py +37 -153
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +2 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +185 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -30
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/middleware.py +26 -1
- infrahub/patch/plan_writer.py +2 -2
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +99 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +10 -1
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +16 -3
- infrahub/services/__init__.py +8 -5
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/tasks.py +3 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workflows/catalogue.py +110 -3
- infrahub/workflows/initialization.py +16 -0
- infrahub/workflows/models.py +17 -2
- infrahub_sdk/branch.py +5 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +364 -84
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +18 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +18 -3
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +38 -0
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/object.py +120 -7
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +9 -11
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +177 -134
- infrahub_testcontainers/container.py +1 -1
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub/actions/tasks.py
CHANGED
|
@@ -1,17 +1,114 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
|
-
from
|
|
3
|
+
from collections import defaultdict
|
|
4
|
+
from typing import TYPE_CHECKING, Any
|
|
5
|
+
|
|
6
|
+
from infrahub_sdk.graphql import Mutation, Query
|
|
7
|
+
from infrahub_sdk.types import Order
|
|
4
8
|
from prefect import flow
|
|
5
9
|
|
|
6
10
|
from infrahub.context import InfrahubContext # noqa: TC001 needed for prefect flow
|
|
11
|
+
from infrahub.core.constants import InfrahubKind
|
|
12
|
+
from infrahub.generators.models import (
|
|
13
|
+
GeneratorDefinitionModel,
|
|
14
|
+
RequestGeneratorRun,
|
|
15
|
+
)
|
|
7
16
|
from infrahub.services import InfrahubServices # noqa: TC001 needed for prefect flow
|
|
8
17
|
from infrahub.trigger.models import TriggerType
|
|
9
18
|
from infrahub.trigger.setup import setup_triggers_specific
|
|
19
|
+
from infrahub.workers.dependencies import get_client, get_workflow
|
|
20
|
+
from infrahub.workflows.catalogue import REQUEST_GENERATOR_RUN
|
|
10
21
|
from infrahub.workflows.utils import add_tags
|
|
11
22
|
|
|
12
23
|
from .gather import gather_trigger_action_rules
|
|
13
24
|
from .models import EventGroupMember # noqa: TC001 needed for prefect flow
|
|
14
25
|
|
|
26
|
+
if TYPE_CHECKING:
|
|
27
|
+
from infrahub_sdk.client import InfrahubClient
|
|
28
|
+
from infrahub_sdk.node import InfrahubNode
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def get_generator_run_query(definition_id: str, target_ids: list[str]) -> Query:
|
|
32
|
+
return Query(
|
|
33
|
+
name=InfrahubKind.GENERATORDEFINITION,
|
|
34
|
+
query={
|
|
35
|
+
InfrahubKind.GENERATORDEFINITION: {
|
|
36
|
+
"@filters": {
|
|
37
|
+
"ids": [definition_id],
|
|
38
|
+
},
|
|
39
|
+
"edges": {
|
|
40
|
+
"node": {
|
|
41
|
+
"id": None,
|
|
42
|
+
"name": {
|
|
43
|
+
"value": None,
|
|
44
|
+
},
|
|
45
|
+
"class_name": {
|
|
46
|
+
"value": None,
|
|
47
|
+
},
|
|
48
|
+
"file_path": {
|
|
49
|
+
"value": None,
|
|
50
|
+
},
|
|
51
|
+
"query": {
|
|
52
|
+
"node": {
|
|
53
|
+
"name": {
|
|
54
|
+
"value": None,
|
|
55
|
+
},
|
|
56
|
+
},
|
|
57
|
+
},
|
|
58
|
+
"convert_query_response": {
|
|
59
|
+
"value": None,
|
|
60
|
+
},
|
|
61
|
+
"parameters": {
|
|
62
|
+
"value": None,
|
|
63
|
+
},
|
|
64
|
+
"execute_in_proposed_change": {
|
|
65
|
+
"value": None,
|
|
66
|
+
},
|
|
67
|
+
"execute_after_merge": {
|
|
68
|
+
"value": None,
|
|
69
|
+
},
|
|
70
|
+
"targets": {
|
|
71
|
+
"node": {
|
|
72
|
+
"id": None,
|
|
73
|
+
"members": {
|
|
74
|
+
"@filters": {
|
|
75
|
+
"ids": target_ids,
|
|
76
|
+
},
|
|
77
|
+
"edges": {
|
|
78
|
+
"node": {
|
|
79
|
+
"__typename": None,
|
|
80
|
+
"id": None,
|
|
81
|
+
"display_label": None,
|
|
82
|
+
},
|
|
83
|
+
},
|
|
84
|
+
},
|
|
85
|
+
},
|
|
86
|
+
},
|
|
87
|
+
"repository": {
|
|
88
|
+
"node": {
|
|
89
|
+
"__typename": None,
|
|
90
|
+
"id": None,
|
|
91
|
+
"name": {
|
|
92
|
+
"value": None,
|
|
93
|
+
},
|
|
94
|
+
f"... on {InfrahubKind.REPOSITORY}": {
|
|
95
|
+
"commit": {
|
|
96
|
+
"value": None,
|
|
97
|
+
},
|
|
98
|
+
},
|
|
99
|
+
f"... on {InfrahubKind.READONLYREPOSITORY}": {
|
|
100
|
+
"commit": {
|
|
101
|
+
"value": None,
|
|
102
|
+
},
|
|
103
|
+
},
|
|
104
|
+
},
|
|
105
|
+
},
|
|
106
|
+
},
|
|
107
|
+
},
|
|
108
|
+
},
|
|
109
|
+
},
|
|
110
|
+
)
|
|
111
|
+
|
|
15
112
|
|
|
16
113
|
@flow(
|
|
17
114
|
name="action-add-node-to-group",
|
|
@@ -65,12 +162,19 @@ async def run_generator(
|
|
|
65
162
|
branch_name: str,
|
|
66
163
|
node_ids: list[str],
|
|
67
164
|
generator_definition_id: str,
|
|
68
|
-
context: InfrahubContext,
|
|
69
|
-
service: InfrahubServices,
|
|
165
|
+
context: InfrahubContext,
|
|
166
|
+
service: InfrahubServices, # noqa: ARG001
|
|
70
167
|
) -> None:
|
|
71
168
|
await add_tags(branches=[branch_name], nodes=node_ids + [generator_definition_id])
|
|
72
|
-
|
|
73
|
-
|
|
169
|
+
|
|
170
|
+
client = get_client()
|
|
171
|
+
|
|
172
|
+
await _run_generators(
|
|
173
|
+
branch_name=branch_name,
|
|
174
|
+
node_ids=node_ids,
|
|
175
|
+
generator_definition_id=generator_definition_id,
|
|
176
|
+
client=client,
|
|
177
|
+
context=context,
|
|
74
178
|
)
|
|
75
179
|
|
|
76
180
|
|
|
@@ -82,13 +186,20 @@ async def run_generator_group_event(
|
|
|
82
186
|
branch_name: str,
|
|
83
187
|
members: list[EventGroupMember],
|
|
84
188
|
generator_definition_id: str,
|
|
85
|
-
context: InfrahubContext,
|
|
86
|
-
service: InfrahubServices,
|
|
189
|
+
context: InfrahubContext,
|
|
190
|
+
service: InfrahubServices, # noqa: ARG001
|
|
87
191
|
) -> None:
|
|
88
192
|
node_ids = [node.id for node in members]
|
|
89
193
|
await add_tags(branches=[branch_name], nodes=node_ids + [generator_definition_id])
|
|
90
|
-
|
|
91
|
-
|
|
194
|
+
|
|
195
|
+
client = get_client()
|
|
196
|
+
|
|
197
|
+
await _run_generators(
|
|
198
|
+
branch_name=branch_name,
|
|
199
|
+
node_ids=node_ids,
|
|
200
|
+
generator_definition_id=generator_definition_id,
|
|
201
|
+
client=client,
|
|
202
|
+
context=context,
|
|
92
203
|
)
|
|
93
204
|
|
|
94
205
|
|
|
@@ -104,16 +215,97 @@ async def configure_action_rules(
|
|
|
104
215
|
) # type: ignore[misc]
|
|
105
216
|
|
|
106
217
|
|
|
107
|
-
async def
|
|
218
|
+
async def _get_targets(
|
|
219
|
+
branch_name: str,
|
|
220
|
+
targets: list[dict[str, Any]],
|
|
221
|
+
client: InfrahubClient,
|
|
222
|
+
) -> dict[str, dict[str, InfrahubNode]]:
|
|
223
|
+
"""Get the targets per kind in order to extract the variables."""
|
|
224
|
+
|
|
225
|
+
targets_per_kind: dict[str, dict[str, InfrahubNode]] = defaultdict(dict)
|
|
226
|
+
|
|
227
|
+
for target in targets:
|
|
228
|
+
targets_per_kind[target["node"]["__typename"]][target["node"]["id"]] = None
|
|
229
|
+
|
|
230
|
+
for kind, values in targets_per_kind.items():
|
|
231
|
+
nodes = await client.filters(
|
|
232
|
+
kind=kind, branch=branch_name, ids=list(values.keys()), populate_store=False, order=Order(disable=True)
|
|
233
|
+
)
|
|
234
|
+
for node in nodes:
|
|
235
|
+
targets_per_kind[kind][node.id] = node
|
|
236
|
+
|
|
237
|
+
return targets_per_kind
|
|
238
|
+
|
|
239
|
+
|
|
240
|
+
async def _run_generators(
|
|
108
241
|
branch_name: str,
|
|
109
242
|
node_ids: list[str],
|
|
110
243
|
generator_definition_id: str,
|
|
111
|
-
|
|
244
|
+
client: InfrahubClient,
|
|
245
|
+
context: InfrahubContext | None = None,
|
|
112
246
|
) -> None:
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
247
|
+
"""Fetch generator metadata and submit per-target runs.
|
|
248
|
+
|
|
249
|
+
Args:
|
|
250
|
+
branch_name: Branch on which to execute.
|
|
251
|
+
node_ids: Node IDs to run against (restricts selection if provided).
|
|
252
|
+
generator_definition_id: Generator definition to execute.
|
|
253
|
+
client: InfrahubClient to query additional data.
|
|
254
|
+
context: Execution context passed to downstream workflow submissions.
|
|
255
|
+
|
|
256
|
+
Returns:
|
|
257
|
+
None
|
|
258
|
+
|
|
259
|
+
Raises:
|
|
260
|
+
ValueError: If the generator definition is not found or none of the requested
|
|
261
|
+
targets are members of the target group.
|
|
262
|
+
"""
|
|
263
|
+
response = await client.execute_graphql(
|
|
264
|
+
query=get_generator_run_query(definition_id=generator_definition_id, target_ids=node_ids).render(),
|
|
265
|
+
branch_name=branch_name,
|
|
117
266
|
)
|
|
267
|
+
if not response[InfrahubKind.GENERATORDEFINITION]["edges"]:
|
|
268
|
+
raise ValueError(f"Generator definition {generator_definition_id} not found")
|
|
118
269
|
|
|
119
|
-
|
|
270
|
+
data = response[InfrahubKind.GENERATORDEFINITION]["edges"][0]["node"]
|
|
271
|
+
|
|
272
|
+
if not data["targets"]["node"]["members"]["edges"]:
|
|
273
|
+
raise ValueError(f"Target {node_ids[0]} is not part of the group {data['targets']['node']['id']}")
|
|
274
|
+
|
|
275
|
+
targets = data["targets"]["node"]["members"]["edges"]
|
|
276
|
+
|
|
277
|
+
targets_per_kind = await _get_targets(branch_name=branch_name, targets=targets, client=client)
|
|
278
|
+
|
|
279
|
+
workflow = get_workflow()
|
|
280
|
+
|
|
281
|
+
for target in targets:
|
|
282
|
+
node: InfrahubNode | None = None
|
|
283
|
+
if data["parameters"]["value"]:
|
|
284
|
+
node = targets_per_kind[target["node"]["__typename"]][target["node"]["id"]]
|
|
285
|
+
|
|
286
|
+
request_generator_run_model = RequestGeneratorRun(
|
|
287
|
+
generator_definition=GeneratorDefinitionModel(
|
|
288
|
+
definition_id=generator_definition_id,
|
|
289
|
+
definition_name=data["name"]["value"],
|
|
290
|
+
class_name=data["class_name"]["value"],
|
|
291
|
+
file_path=data["file_path"]["value"],
|
|
292
|
+
query_name=data["query"]["node"]["name"]["value"],
|
|
293
|
+
convert_query_response=data["convert_query_response"]["value"],
|
|
294
|
+
group_id=data["targets"]["node"]["id"],
|
|
295
|
+
parameters=data["parameters"]["value"],
|
|
296
|
+
execute_in_proposed_change=data["execute_in_proposed_change"]["value"],
|
|
297
|
+
execute_after_merge=data["execute_after_merge"]["value"],
|
|
298
|
+
),
|
|
299
|
+
commit=data["repository"]["node"]["commit"]["value"],
|
|
300
|
+
repository_id=data["repository"]["node"]["id"],
|
|
301
|
+
repository_name=data["repository"]["node"]["name"]["value"],
|
|
302
|
+
repository_kind=data["repository"]["node"]["__typename"],
|
|
303
|
+
branch_name=branch_name,
|
|
304
|
+
query=data["query"]["node"]["name"]["value"],
|
|
305
|
+
variables=await node.extract(params=data["parameters"]["value"]) if node else {},
|
|
306
|
+
target_id=target["node"]["id"],
|
|
307
|
+
target_name=target["node"]["display_label"],
|
|
308
|
+
)
|
|
309
|
+
await workflow.submit_workflow(
|
|
310
|
+
workflow=REQUEST_GENERATOR_RUN, context=context, parameters={"model": request_generator_run_model}
|
|
311
|
+
)
|
infrahub/api/artifact.py
CHANGED
|
@@ -15,6 +15,7 @@ from infrahub.api.dependencies import (
|
|
|
15
15
|
)
|
|
16
16
|
from infrahub.core import registry
|
|
17
17
|
from infrahub.core.account import ObjectPermission
|
|
18
|
+
from infrahub.core.branch.needs_rebase_status import check_need_rebase_status
|
|
18
19
|
from infrahub.core.constants import GLOBAL_BRANCH_NAME, InfrahubKind, PermissionAction
|
|
19
20
|
from infrahub.core.protocols import CoreArtifactDefinition
|
|
20
21
|
from infrahub.database import InfrahubDatabase # noqa: TC001
|
|
@@ -74,6 +75,8 @@ async def generate_artifact(
|
|
|
74
75
|
permission_manager: PermissionManager = Depends(get_permission_manager),
|
|
75
76
|
context: InfrahubContext = Depends(get_context),
|
|
76
77
|
) -> None:
|
|
78
|
+
check_need_rebase_status(branch_params.branch)
|
|
79
|
+
|
|
77
80
|
permission_decision = (
|
|
78
81
|
PermissionDecisionFlag.ALLOW_DEFAULT
|
|
79
82
|
if branch_params.branch.name in (GLOBAL_BRANCH_NAME, registry.default_branch)
|
infrahub/api/diff/diff.py
CHANGED
|
@@ -52,7 +52,7 @@ async def get_diff_files(
|
|
|
52
52
|
for branch_name, items in diff_files.items():
|
|
53
53
|
for item in items:
|
|
54
54
|
repository_id = item.repository.get_id()
|
|
55
|
-
display_label = await item.repository.
|
|
55
|
+
display_label = await item.repository.get_display_label(db=db)
|
|
56
56
|
if repository_id not in response[branch_name]:
|
|
57
57
|
response[branch_name][repository_id] = BranchDiffRepository(
|
|
58
58
|
id=repository_id,
|
infrahub/api/query.py
CHANGED
|
@@ -24,6 +24,7 @@ from infrahub.graphql.metrics import (
|
|
|
24
24
|
GRAPHQL_RESPONSE_SIZE_METRICS,
|
|
25
25
|
GRAPHQL_TOP_LEVEL_QUERIES_METRICS,
|
|
26
26
|
)
|
|
27
|
+
from infrahub.graphql.middleware import raise_on_mutation_on_branch_needing_rebase
|
|
27
28
|
from infrahub.graphql.utils import extract_data
|
|
28
29
|
from infrahub.groups.models import RequestGraphQLQueryGroupUpdate
|
|
29
30
|
from infrahub.log import get_logger
|
|
@@ -98,6 +99,7 @@ async def execute_query(
|
|
|
98
99
|
context_value=gql_params.context,
|
|
99
100
|
root_value=None,
|
|
100
101
|
variable_values=params,
|
|
102
|
+
middleware=[raise_on_mutation_on_branch_needing_rebase],
|
|
101
103
|
)
|
|
102
104
|
|
|
103
105
|
data = extract_data(query_name=gql_query.name.value, result=result)
|
infrahub/api/schema.py
CHANGED
|
@@ -18,6 +18,7 @@ from infrahub.api.exceptions import SchemaNotValidError
|
|
|
18
18
|
from infrahub.core import registry
|
|
19
19
|
from infrahub.core.account import GlobalPermission
|
|
20
20
|
from infrahub.core.branch import Branch # noqa: TC001
|
|
21
|
+
from infrahub.core.branch.needs_rebase_status import check_need_rebase_status
|
|
21
22
|
from infrahub.core.constants import GLOBAL_BRANCH_NAME, GlobalPermissions, PermissionDecision
|
|
22
23
|
from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
|
|
23
24
|
from infrahub.core.models import ( # noqa: TC001
|
|
@@ -287,6 +288,8 @@ async def load_schema(
|
|
|
287
288
|
permission_manager: PermissionManager = Depends(get_permission_manager),
|
|
288
289
|
context: InfrahubContext = Depends(get_context),
|
|
289
290
|
) -> SchemaUpdate:
|
|
291
|
+
check_need_rebase_status(branch)
|
|
292
|
+
|
|
290
293
|
permission_manager.raise_for_permission(
|
|
291
294
|
permission=define_global_permission_from_branch(
|
|
292
295
|
permission=GlobalPermissions.MANAGE_SCHEMA, branch_name=branch.name
|
infrahub/auth.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
from __future__ import annotations
|
|
2
2
|
|
|
3
3
|
import uuid
|
|
4
|
-
from datetime import datetime, timedelta
|
|
4
|
+
from datetime import UTC, datetime, timedelta
|
|
5
5
|
from enum import Enum
|
|
6
6
|
from typing import TYPE_CHECKING
|
|
7
7
|
|
|
@@ -78,7 +78,7 @@ async def authenticate_with_password(
|
|
|
78
78
|
if not valid_credentials:
|
|
79
79
|
raise AuthorizationError("Incorrect password")
|
|
80
80
|
|
|
81
|
-
now = datetime.now(tz=
|
|
81
|
+
now = datetime.now(tz=UTC)
|
|
82
82
|
refresh_expires = now + timedelta(seconds=config.SETTINGS.security.refresh_token_lifetime)
|
|
83
83
|
|
|
84
84
|
session_id = await create_db_refresh_token(db=db, account_id=account.id, expiration=refresh_expires)
|
|
@@ -139,7 +139,7 @@ async def signin_sso_account(db: InfrahubDatabase, account_name: str, sso_groups
|
|
|
139
139
|
await group.members.add(db=db, data=account)
|
|
140
140
|
await group.members.save(db=db)
|
|
141
141
|
|
|
142
|
-
now = datetime.now(tz=
|
|
142
|
+
now = datetime.now(tz=UTC)
|
|
143
143
|
refresh_expires = now + timedelta(seconds=config.SETTINGS.security.refresh_token_lifetime)
|
|
144
144
|
session_id = await create_db_refresh_token(db=db, account_id=account.id, expiration=refresh_expires)
|
|
145
145
|
access_token = generate_access_token(account_id=account.id, session_id=session_id)
|
|
@@ -148,7 +148,7 @@ async def signin_sso_account(db: InfrahubDatabase, account_name: str, sso_groups
|
|
|
148
148
|
|
|
149
149
|
|
|
150
150
|
def generate_access_token(account_id: str, session_id: uuid.UUID) -> str:
|
|
151
|
-
now = datetime.now(tz=
|
|
151
|
+
now = datetime.now(tz=UTC)
|
|
152
152
|
|
|
153
153
|
access_expires = now + timedelta(seconds=config.SETTINGS.security.access_token_lifetime)
|
|
154
154
|
access_data = {
|
|
@@ -165,7 +165,7 @@ def generate_access_token(account_id: str, session_id: uuid.UUID) -> str:
|
|
|
165
165
|
|
|
166
166
|
|
|
167
167
|
def generate_refresh_token(account_id: str, session_id: uuid.UUID, expiration: datetime) -> str:
|
|
168
|
-
now = datetime.now(tz=
|
|
168
|
+
now = datetime.now(tz=UTC)
|
|
169
169
|
|
|
170
170
|
refresh_data = {
|
|
171
171
|
"sub": account_id,
|
infrahub/cli/db.py
CHANGED
|
@@ -5,7 +5,7 @@ import logging
|
|
|
5
5
|
import os
|
|
6
6
|
from collections import defaultdict
|
|
7
7
|
from csv import DictReader, DictWriter
|
|
8
|
-
from datetime import
|
|
8
|
+
from datetime import UTC, datetime
|
|
9
9
|
from enum import Enum
|
|
10
10
|
from pathlib import Path
|
|
11
11
|
from typing import TYPE_CHECKING, Any, Sequence
|
|
@@ -54,12 +54,13 @@ from infrahub.log import get_logger
|
|
|
54
54
|
|
|
55
55
|
from .constants import ERROR_BADGE, FAILED_BADGE, SUCCESS_BADGE
|
|
56
56
|
from .db_commands.check_inheritance import check_inheritance
|
|
57
|
+
from .db_commands.clean_duplicate_schema_fields import clean_duplicate_schema_fields
|
|
57
58
|
from .patch import patch_app
|
|
58
59
|
|
|
59
60
|
|
|
60
61
|
def get_timestamp_string() -> str:
|
|
61
62
|
"""Generate a timestamp string in the format YYYYMMDD-HHMMSS."""
|
|
62
|
-
return datetime.now(tz=
|
|
63
|
+
return datetime.now(tz=UTC).strftime("%Y%m%d-%H%M%S")
|
|
63
64
|
|
|
64
65
|
|
|
65
66
|
if TYPE_CHECKING:
|
|
@@ -200,6 +201,29 @@ async def check_inheritance_cmd(
|
|
|
200
201
|
await dbdriver.close()
|
|
201
202
|
|
|
202
203
|
|
|
204
|
+
@app.command(name="check-duplicate-schema-fields")
|
|
205
|
+
async def check_duplicate_schema_fields_cmd(
|
|
206
|
+
ctx: typer.Context,
|
|
207
|
+
fix: bool = typer.Option(False, help="Fix the duplicate schema fields on the default branch."),
|
|
208
|
+
config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
|
|
209
|
+
) -> None:
|
|
210
|
+
"""Check for any duplicate schema attributes or relationships on the default branch"""
|
|
211
|
+
logging.getLogger("infrahub").setLevel(logging.WARNING)
|
|
212
|
+
logging.getLogger("neo4j").setLevel(logging.ERROR)
|
|
213
|
+
logging.getLogger("prefect").setLevel(logging.ERROR)
|
|
214
|
+
|
|
215
|
+
config.load_and_exit(config_file_name=config_file)
|
|
216
|
+
|
|
217
|
+
context: CliContext = ctx.obj
|
|
218
|
+
dbdriver = await context.init_db(retry=1)
|
|
219
|
+
|
|
220
|
+
success = await clean_duplicate_schema_fields(db=dbdriver, fix=fix)
|
|
221
|
+
if not success:
|
|
222
|
+
raise typer.Exit(code=1)
|
|
223
|
+
|
|
224
|
+
await dbdriver.close()
|
|
225
|
+
|
|
226
|
+
|
|
203
227
|
@app.command(name="update-core-schema")
|
|
204
228
|
async def update_core_schema_cmd(
|
|
205
229
|
ctx: typer.Context,
|
|
@@ -0,0 +1,212 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from enum import Enum
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from rich import print as rprint
|
|
6
|
+
from rich.console import Console
|
|
7
|
+
from rich.table import Table
|
|
8
|
+
|
|
9
|
+
from infrahub.cli.constants import FAILED_BADGE, SUCCESS_BADGE
|
|
10
|
+
from infrahub.core.query import Query, QueryType
|
|
11
|
+
from infrahub.database import InfrahubDatabase
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class SchemaFieldType(str, Enum):
|
|
15
|
+
ATTRIBUTE = "attribute"
|
|
16
|
+
RELATIONSHIP = "relationship"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@dataclass
|
|
20
|
+
class SchemaFieldDetails:
|
|
21
|
+
schema_kind: str
|
|
22
|
+
schema_uuid: str
|
|
23
|
+
field_type: SchemaFieldType
|
|
24
|
+
field_name: str
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class DuplicateSchemaFields(Query):
|
|
28
|
+
async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None: # noqa: ARG002
|
|
29
|
+
query = """
|
|
30
|
+
MATCH (root:Root)
|
|
31
|
+
LIMIT 1
|
|
32
|
+
WITH root.default_branch AS default_branch
|
|
33
|
+
MATCH (field:SchemaAttribute|SchemaRelationship)
|
|
34
|
+
CALL (default_branch, field) {
|
|
35
|
+
MATCH (field)-[is_part_of:IS_PART_OF]->(:Root)
|
|
36
|
+
WHERE is_part_of.branch = default_branch
|
|
37
|
+
ORDER BY is_part_of.from DESC
|
|
38
|
+
RETURN is_part_of
|
|
39
|
+
LIMIT 1
|
|
40
|
+
}
|
|
41
|
+
WITH default_branch, field, CASE
|
|
42
|
+
WHEN is_part_of.status = "active" AND is_part_of.to IS NULL THEN is_part_of.from
|
|
43
|
+
ELSE NULL
|
|
44
|
+
END AS active_from
|
|
45
|
+
WHERE active_from IS NOT NULL
|
|
46
|
+
WITH default_branch, field, active_from, "SchemaAttribute" IN labels(field) AS is_attribute
|
|
47
|
+
CALL (field, default_branch) {
|
|
48
|
+
MATCH (field)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
|
|
49
|
+
WHERE r1.branch = default_branch AND r2.branch = default_branch
|
|
50
|
+
AND r1.status = "active" AND r2.status = "active"
|
|
51
|
+
AND r1.to IS NULL AND r2.to IS NULL
|
|
52
|
+
ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
|
|
53
|
+
LIMIT 1
|
|
54
|
+
RETURN name_value.value AS field_name
|
|
55
|
+
}
|
|
56
|
+
CALL (field, default_branch) {
|
|
57
|
+
MATCH (field)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(peer:SchemaNode|SchemaGeneric)
|
|
58
|
+
WHERE rel.name IN ["schema__node__relationships", "schema__node__attributes"]
|
|
59
|
+
AND r1.branch = default_branch AND r2.branch = default_branch
|
|
60
|
+
AND r1.status = "active" AND r2.status = "active"
|
|
61
|
+
AND r1.to IS NULL AND r2.to IS NULL
|
|
62
|
+
ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
|
|
63
|
+
LIMIT 1
|
|
64
|
+
RETURN peer AS schema_vertex
|
|
65
|
+
}
|
|
66
|
+
WITH default_branch, field, field_name, is_attribute, active_from, schema_vertex
|
|
67
|
+
ORDER BY active_from DESC
|
|
68
|
+
WITH default_branch, field_name, is_attribute, schema_vertex, collect(field) AS fields_reverse_chron
|
|
69
|
+
WHERE size(fields_reverse_chron) > 1
|
|
70
|
+
"""
|
|
71
|
+
self.add_to_query(query)
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
class GetDuplicateSchemaFields(DuplicateSchemaFields):
|
|
75
|
+
"""
|
|
76
|
+
Get the kind, field type, and field name for any duplicated attributes or relationships on a given schema
|
|
77
|
+
on the default branch
|
|
78
|
+
"""
|
|
79
|
+
|
|
80
|
+
name = "get_duplicate_schema_fields"
|
|
81
|
+
type = QueryType.READ
|
|
82
|
+
insert_return = False
|
|
83
|
+
|
|
84
|
+
async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
|
|
85
|
+
await super().query_init(db=db, **kwargs)
|
|
86
|
+
query = """
|
|
87
|
+
CALL (schema_vertex, default_branch) {
|
|
88
|
+
MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "namespace"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
|
|
89
|
+
WHERE r1.branch = default_branch AND r2.branch = default_branch
|
|
90
|
+
ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
|
|
91
|
+
LIMIT 1
|
|
92
|
+
RETURN name_value.value AS schema_namespace
|
|
93
|
+
}
|
|
94
|
+
CALL (schema_vertex, default_branch) {
|
|
95
|
+
MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
|
|
96
|
+
WHERE r1.branch = default_branch AND r2.branch = default_branch
|
|
97
|
+
ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
|
|
98
|
+
LIMIT 1
|
|
99
|
+
RETURN name_value.value AS schema_name
|
|
100
|
+
}
|
|
101
|
+
RETURN schema_namespace + schema_name AS schema_kind, schema_vertex.uuid AS schema_uuid, field_name, is_attribute
|
|
102
|
+
ORDER BY schema_kind ASC, is_attribute DESC, field_name ASC
|
|
103
|
+
"""
|
|
104
|
+
self.return_labels = ["schema_kind", "schema_uuid", "field_name", "is_attribute"]
|
|
105
|
+
self.add_to_query(query)
|
|
106
|
+
|
|
107
|
+
def get_schema_field_details(self) -> list[SchemaFieldDetails]:
|
|
108
|
+
schema_field_details: list[SchemaFieldDetails] = []
|
|
109
|
+
for result in self.results:
|
|
110
|
+
schema_kind = result.get_as_type(label="schema_kind", return_type=str)
|
|
111
|
+
schema_uuid = result.get_as_type(label="schema_uuid", return_type=str)
|
|
112
|
+
field_name = result.get_as_type(label="field_name", return_type=str)
|
|
113
|
+
is_attribute = result.get_as_type(label="is_attribute", return_type=bool)
|
|
114
|
+
schema_field_details.append(
|
|
115
|
+
SchemaFieldDetails(
|
|
116
|
+
schema_kind=schema_kind,
|
|
117
|
+
schema_uuid=schema_uuid,
|
|
118
|
+
field_name=field_name,
|
|
119
|
+
field_type=SchemaFieldType.ATTRIBUTE if is_attribute else SchemaFieldType.RELATIONSHIP,
|
|
120
|
+
)
|
|
121
|
+
)
|
|
122
|
+
return schema_field_details
|
|
123
|
+
|
|
124
|
+
|
|
125
|
+
class FixDuplicateSchemaFields(DuplicateSchemaFields):
    """
    Fix the duplicate schema fields by hard deleting the earlier duplicate(s)

    Extends the duplicate-detection query from the parent class with a write
    clause that removes every duplicate except the most recent one, including
    the duplicate's edges, its Relationship peers, and its Attribute vertices.
    """

    name = "fix_duplicate_schema_fields"
    type = QueryType.WRITE
    insert_return = False

    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
        # The parent query locates the duplicated schema fields; it is expected to
        # leave `fields_reverse_chron` (newest first) and `default_branch` in scope
        # for the write clause appended below — TODO confirm against the parent class.
        await super().query_init(db=db, **kwargs)
        # Keep the newest duplicate (head of fields_reverse_chron) and hard-delete
        # the rest. For each field to delete, in order:
        #   1. detach it from its schema (IS_PART_OF edges on the default branch),
        #   2. strip the default-branch edges off its Relationship peers and delete
        #      any peer left with no edges at all,
        #   3. do the same for its Attribute vertices (HAS_ATTRIBUTE),
        #   4. finally delete the field vertex itself once it is fully disconnected.
        # Edge deletion must happen before the `NOT exists((v)--())` orphan checks,
        # so the statement order inside the CALL subquery is significant.
        query = """
        WITH default_branch, tail(fields_reverse_chron) AS fields_to_delete
        UNWIND fields_to_delete AS field_to_delete
        CALL (field_to_delete, default_branch) {
            MATCH (field_to_delete)-[r:IS_PART_OF {branch: default_branch}]-()
            DELETE r
            WITH field_to_delete
            MATCH (field_to_delete)-[:IS_RELATED {branch: default_branch}]-(rel:Relationship)
            WITH DISTINCT field_to_delete, rel
            MATCH (rel)-[r {branch: default_branch}]-()
            DELETE r
            WITH field_to_delete, rel
            OPTIONAL MATCH (rel)
            WHERE NOT exists((rel)--())
            DELETE rel
            WITH DISTINCT field_to_delete
            MATCH (field_to_delete)-[:HAS_ATTRIBUTE {branch: default_branch}]->(attr:Attribute)
            MATCH (attr)-[r {branch: default_branch}]-()
            DELETE r
            WITH field_to_delete, attr
            OPTIONAL MATCH (attr)
            WHERE NOT exists((attr)--())
            DELETE attr
            WITH DISTINCT field_to_delete
            OPTIONAL MATCH (field_to_delete)
            WHERE NOT exists((field_to_delete)--())
            DELETE field_to_delete
        }
        """
        self.add_to_query(query)
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def display_duplicate_schema_fields(duplicate_schema_fields: list[SchemaFieldDetails]) -> None:
    """Print a rich table listing every duplicated schema field on the default branch."""
    table = Table(title="Duplicate Schema Fields on Default Branch")
    for header in ("Schema Kind", "Schema UUID", "Field Name", "Field Type"):
        table.add_column(header)

    for field in duplicate_schema_fields:
        table.add_row(
            field.schema_kind,
            field.schema_uuid,
            field.field_name,
            field.field_type.value,
        )

    Console().print(table)
|
|
187
|
+
|
|
188
|
+
|
|
189
|
+
async def clean_duplicate_schema_fields(db: InfrahubDatabase, fix: bool = False) -> bool:
    """
    Identify any attributes or relationships that are duplicated in a schema on the default branch

    If fix is True, runs cypher queries to hard delete the earlier duplicate.
    Returns True when the database is clean (no duplicates found, or duplicates were
    just deleted); returns False when duplicates exist and fix was not requested.
    """
    finder = await GetDuplicateSchemaFields.init(db=db)
    await finder.execute(db=db)
    duplicates = finder.get_schema_field_details()

    if not duplicates:
        rprint(f"{SUCCESS_BADGE} No duplicate schema fields found")
        return True

    # Always show the user what was found before (maybe) deleting anything.
    display_duplicate_schema_fields(duplicates)

    if not fix:
        rprint(f"{FAILED_BADGE} Use the --fix flag to fix the duplicate schema fields")
        return False

    fixer = await FixDuplicateSchemaFields.init(db=db)
    await fixer.execute(db=db)
    rprint(f"{SUCCESS_BADGE} Duplicate schema fields deleted from the default branch")
    return True
|
infrahub/config.py
CHANGED
|
@@ -8,7 +8,7 @@ from enum import Enum
|
|
|
8
8
|
from pathlib import Path
|
|
9
9
|
from typing import TYPE_CHECKING, Any
|
|
10
10
|
|
|
11
|
-
import
|
|
11
|
+
import tomllib
|
|
12
12
|
from infrahub_sdk.utils import generate_uuid
|
|
13
13
|
from pydantic import (
|
|
14
14
|
AliasChoices,
|
|
@@ -371,6 +371,11 @@ class CacheSettings(BaseSettings):
|
|
|
371
371
|
tls_enabled: bool = Field(default=False, description="Indicates if TLS is enabled for the connection")
|
|
372
372
|
tls_insecure: bool = Field(default=False, description="Indicates if TLS certificates are verified")
|
|
373
373
|
tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
|
|
374
|
+
clean_up_deadlocks_interval_mins: int = Field(
|
|
375
|
+
default=15,
|
|
376
|
+
ge=1,
|
|
377
|
+
description="Age threshold in minutes: locks older than this and owned by inactive workers are deleted by the cleanup task.",
|
|
378
|
+
)
|
|
374
379
|
|
|
375
380
|
@property
|
|
376
381
|
def service_port(self) -> int:
|
|
@@ -975,7 +980,7 @@ def load(config_file_name: Path | str = "infrahub.toml", config_data: dict[str,
|
|
|
975
980
|
|
|
976
981
|
if config_file.exists():
|
|
977
982
|
config_string = config_file.read_text(encoding="utf-8")
|
|
978
|
-
config_tmp =
|
|
983
|
+
config_tmp = tomllib.loads(config_string)
|
|
979
984
|
|
|
980
985
|
return Settings(**config_tmp)
|
|
981
986
|
|