infrahub-server: infrahub_server-1.2.3-py3-none-any.whl → infrahub_server-1.2.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/cli/db.py +308 -2
- infrahub/cli/git_agent.py +4 -10
- infrahub/config.py +32 -0
- infrahub/core/branch/tasks.py +50 -10
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/constraint/node/runner.py +6 -5
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/migrations/graph/__init__.py +4 -0
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +68 -70
- infrahub/core/migrations/graph/m025_uniqueness_nulls.py +26 -0
- infrahub/core/migrations/graph/m026_0000_prefix_fix.py +54 -0
- infrahub/core/migrations/schema/node_attribute_remove.py +16 -2
- infrahub/core/models.py +1 -1
- infrahub/core/node/__init__.py +4 -1
- infrahub/core/node/constraints/grouped_uniqueness.py +6 -1
- infrahub/core/node/resource_manager/number_pool.py +1 -1
- infrahub/core/registry.py +18 -0
- infrahub/core/schema/basenode_schema.py +21 -1
- infrahub/core/schema/definitions/internal.py +2 -1
- infrahub/core/schema/generated/base_node_schema.py +1 -1
- infrahub/core/schema/manager.py +21 -1
- infrahub/core/schema/schema_branch.py +8 -7
- infrahub/core/schema/schema_branch_computed.py +12 -1
- infrahub/database/__init__.py +10 -0
- infrahub/events/branch_action.py +3 -0
- infrahub/events/group_action.py +6 -1
- infrahub/events/node_action.py +5 -1
- infrahub/git/integrator.py +2 -2
- infrahub/graphql/mutations/main.py +10 -12
- infrahub/message_bus/messages/__init__.py +0 -4
- infrahub/message_bus/messages/request_proposedchange_pipeline.py +5 -0
- infrahub/message_bus/operations/__init__.py +0 -3
- infrahub/message_bus/operations/requests/proposed_change.py +29 -9
- infrahub/message_bus/types.py +2 -34
- infrahub/proposed_change/branch_diff.py +65 -0
- infrahub/proposed_change/tasks.py +12 -4
- infrahub/server.py +6 -11
- infrahub/services/adapters/cache/__init__.py +17 -0
- infrahub/services/adapters/cache/redis.py +11 -1
- infrahub/services/adapters/message_bus/__init__.py +20 -0
- infrahub/services/adapters/workflow/worker.py +1 -1
- infrahub/services/component.py +1 -2
- infrahub/tasks/registry.py +3 -7
- infrahub/workers/infrahub_async.py +4 -10
- infrahub/workflows/catalogue.py +10 -0
- infrahub_sdk/generator.py +1 -0
- infrahub_sdk/node.py +16 -4
- infrahub_sdk/schema/__init__.py +10 -1
- {infrahub_server-1.2.3.dist-info → infrahub_server-1.2.5.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.3.dist-info → infrahub_server-1.2.5.dist-info}/RECORD +57 -60
- infrahub_testcontainers/container.py +4 -0
- infrahub_testcontainers/helpers.py +1 -1
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/branch/flow_models.py +0 -0
- infrahub/message_bus/messages/event_branch_merge.py +0 -13
- infrahub/message_bus/messages/event_worker_newprimaryapi.py +0 -9
- infrahub/message_bus/operations/event/__init__.py +0 -3
- infrahub/message_bus/operations/event/branch.py +0 -61
- infrahub/message_bus/operations/event/worker.py +0 -9
- {infrahub_server-1.2.3.dist-info → infrahub_server-1.2.5.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.3.dist-info → infrahub_server-1.2.5.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.3.dist-info → infrahub_server-1.2.5.dist-info}/entry_points.txt +0 -0
infrahub/graphql/mutations/main.py CHANGED

@@ -363,7 +363,7 @@ class InfrahubMutationMixin:
         branch: Branch,
         db: InfrahubDatabase,
         obj: Node,
-        …
+        skip_uniqueness_check: bool = False,
     ) -> tuple[Node, Self]:
         """
         Wrapper around mutate_update to potentially activate locking and call it within a database transaction.

@@ -378,11 +378,11 @@ class InfrahubMutationMixin:
         if lock_names:
             async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
                 obj = await cls.mutate_update_object(
-                    db=db, info=info, data=data, branch=branch, obj=obj,
+                    db=db, info=info, data=data, branch=branch, obj=obj, skip_uniqueness_check=skip_uniqueness_check
                 )
         else:
             obj = await cls.mutate_update_object(
-                db=db, info=info, data=data, branch=branch, obj=obj,
+                db=db, info=info, data=data, branch=branch, obj=obj, skip_uniqueness_check=skip_uniqueness_check
             )
         result = await cls.mutate_update_to_graphql(db=db, info=info, obj=obj)
         return obj, result

@@ -396,11 +396,11 @@ class InfrahubMutationMixin:
                 data=data,
                 branch=branch,
                 obj=obj,
-                …
+                skip_uniqueness_check=skip_uniqueness_check,
             )
         else:
             obj = await cls.mutate_update_object(
-                db=dbt, info=info, data=data, branch=branch, obj=obj,
+                db=dbt, info=info, data=data, branch=branch, obj=obj, skip_uniqueness_check=skip_uniqueness_check
             )
         result = await cls.mutate_update_to_graphql(db=dbt, info=info, obj=obj)
         return obj, result

@@ -434,7 +434,7 @@ class InfrahubMutationMixin:
         data: InputObjectType,
         branch: Branch,
         obj: Node,
-        …
+        skip_uniqueness_check: bool = False,
     ) -> Node:
         component_registry = get_component_registry()
         node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch)

@@ -442,8 +442,9 @@ class InfrahubMutationMixin:
         before_mutate_profile_ids = await cls._get_profile_ids(db=db, obj=obj)
         await obj.from_graphql(db=db, data=data)
         fields_to_validate = list(data)
-        …
-        …
+        await node_constraint_runner.check(
+            node=obj, field_filters=fields_to_validate, skip_uniqueness_check=skip_uniqueness_check
+        )

         fields = list(data.keys())
         for field_to_remove in ("id", "hfid"):

@@ -494,7 +495,6 @@ class InfrahubMutationMixin:
         db = database or graphql_context.db
         dict_data = dict(data)
         node = None
-        run_constraint_checks = True

         if "id" in dict_data:
             node = await NodeManager.get_one(

@@ -506,7 +506,6 @@ class InfrahubMutationMixin:
             db=db,
             branch=branch,
             obj=node,
-            run_constraint_checks=run_constraint_checks,
         )
         return updated_obj, mutation, False

@@ -525,7 +524,6 @@ class InfrahubMutationMixin:
             db=db,
             branch=branch,
             obj=node,
-            run_constraint_checks=run_constraint_checks,
         )
         return updated_obj, mutation, False

@@ -545,7 +543,7 @@ class InfrahubMutationMixin:
             db=db,
             branch=branch,
             obj=node,
-            …
+            skip_uniqueness_check=True,
         )
         return updated_obj, mutation, False
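Net effect of the hunks above: the per-request `run_constraint_checks` toggle is gone, and a `skip_uniqueness_check` flag is threaded from `mutate_update` down to `NodeConstraintRunner.check` instead, with one caller passing `skip_uniqueness_check=True`. A minimal, self-contained sketch of the pattern, using hypothetical stand-in classes rather than Infrahub's real ones:

```python
from dataclasses import dataclass, field


@dataclass
class ConstraintRunner:
    """Hypothetical stand-in for Infrahub's NodeConstraintRunner."""

    checks_run: list[str] = field(default_factory=list)

    def check(self, node: str, field_filters: list[str], skip_uniqueness_check: bool = False) -> None:
        self.checks_run.append(f"node-constraints:{node}")
        if not skip_uniqueness_check:
            self.checks_run.append(f"uniqueness:{node}")


def mutate_update(runner: ConstraintRunner, node: str, data: dict, skip_uniqueness_check: bool = False) -> None:
    # Default False keeps full validation for ordinary updates; trusted
    # internal paths (e.g. the update leg of an upsert) opt out explicitly.
    runner.check(node=node, field_filters=list(data), skip_uniqueness_check=skip_uniqueness_check)


runner = ConstraintRunner()
mutate_update(runner, "device-1", {"name": "spine1"})
mutate_update(runner, "device-2", {"name": "leaf1"}, skip_uniqueness_check=True)
print(runner.checks_run)
# ['node-constraints:device-1', 'uniqueness:device-1', 'node-constraints:device-2']
```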
infrahub/message_bus/messages/__init__.py CHANGED

@@ -1,8 +1,6 @@
 from infrahub.message_bus import InfrahubMessage, InfrahubResponse

 from .check_generator_run import CheckGeneratorRun
-from .event_branch_merge import EventBranchMerge
-from .event_worker_newprimaryapi import EventWorkerNewPrimaryAPI
 from .finalize_validator_execution import FinalizeValidatorExecution
 from .git_file_get import GitFileGet, GitFileGetResponse
 from .git_repository_connectivity import GitRepositoryConnectivity

@@ -16,8 +14,6 @@ from .send_echo_request import SendEchoRequest, SendEchoRequestResponse

 MESSAGE_MAP: dict[str, type[InfrahubMessage]] = {
     "check.generator.run": CheckGeneratorRun,
-    "event.branch.merge": EventBranchMerge,
-    "event.worker.new_primary_api": EventWorkerNewPrimaryAPI,
     "finalize.validator.execution": FinalizeValidatorExecution,
     "git.file.get": GitFileGet,
     "git.repository.connectivity": GitRepositoryConnectivity,
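`MESSAGE_MAP` is a plain routing-key-to-class dict, so dropping the two `event.*` entries removes those keys from bus dispatch entirely; their work appears to move to Prefect workflows such as the `branch-merge-post-process` definition registered later in this diff. A toy illustration of the lookup, with a hypothetical payload class standing in for the real pydantic message:

```python
from dataclasses import dataclass


@dataclass
class CheckGeneratorRun:
    generator_id: str  # illustrative field; the real message has more


MESSAGE_MAP = {
    "check.generator.run": CheckGeneratorRun,
}


def parse_message(routing_key: str, payload: dict):
    # Retired keys such as "event.branch.merge" now raise KeyError here
    # instead of resolving to a message class.
    message_class = MESSAGE_MAP[routing_key]
    return message_class(**payload)


print(parse_message("check.generator.run", {"generator_id": "gen-1"}))
```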
infrahub/message_bus/messages/request_proposedchange_pipeline.py CHANGED

@@ -1,3 +1,5 @@
+import uuid
+
 from pydantic import Field

 from infrahub.context import InfrahubContext

@@ -16,3 +18,6 @@ class RequestProposedChangePipeline(InfrahubMessage):
         default=CheckType.ALL, description="Can be used to restrict the pipeline to a specific type of job"
     )
     context: InfrahubContext = Field(..., description="The context of the task")
+    pipeline_id: uuid.UUID = Field(
+        default_factory=uuid.uuid4, description="The unique ID of the execution of this pipeline"
+    )
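Because `pipeline_id` uses `default_factory=uuid.uuid4`, every `RequestProposedChangePipeline` instance gets a fresh UUID at construction time, and downstream tasks reuse that one ID to look the diff summary back up in the cache. A small sketch with a stand-in model:

```python
import uuid

from pydantic import BaseModel, Field


class PipelineMessage(BaseModel):
    """Stand-in for RequestProposedChangePipeline, reduced to the new field."""

    # default_factory runs once per instance, so each message gets its own
    # UUID unless the caller supplies one explicitly.
    pipeline_id: uuid.UUID = Field(default_factory=uuid.uuid4)


a = PipelineMessage()
b = PipelineMessage()
assert a.pipeline_id != b.pipeline_id

# Reusing an ID lets follow-up work correlate with the original run:
c = PipelineMessage(pipeline_id=a.pipeline_id)
assert c.pipeline_id == a.pipeline_id
```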
infrahub/message_bus/operations/__init__.py CHANGED

@@ -4,7 +4,6 @@ from prefect import Flow
 from infrahub.message_bus import RPCErrorResponse, messages
 from infrahub.message_bus.operations import (
     check,
-    event,
     finalize,
     git,
     refresh,

@@ -17,8 +16,6 @@ from infrahub.tasks.check import set_check_status

 COMMAND_MAP = {
     "check.generator.run": check.generator.run,
-    "event.branch.merge": event.branch.merge,
-    "event.worker.new_primary_api": event.worker.new_primary_api,
     "finalize.validator.execution": finalize.validator.execution,
     "git.file.get": git.file.get,
     "git.repository.connectivity": git.repository.connectivity,
infrahub/message_bus/operations/requests/proposed_change.py CHANGED

@@ -1,6 +1,7 @@
 from __future__ import annotations

 from enum import IntFlag
+from typing import TYPE_CHECKING

 from prefect import flow, task
 from prefect.logging import get_run_logger

@@ -20,6 +21,14 @@ from infrahub.message_bus.types import (
     ProposedChangeRepository,
     ProposedChangeSubscriber,
 )
+from infrahub.proposed_change.branch_diff import (
+    get_diff_summary_cache,
+    get_modified_kinds,
+    get_modified_node_ids,
+    has_data_changes,
+    has_node_changes,
+    set_diff_summary_cache,
+)
 from infrahub.proposed_change.models import (
     RequestArtifactDefinitionCheck,
     RequestProposedChangeDataIntegrity,

@@ -40,6 +49,9 @@ from infrahub.workflows.catalogue import (
 )
 from infrahub.workflows.utils import add_tags

+if TYPE_CHECKING:
+    from infrahub_sdk.diff import NodeDiff
+

 class DefinitionSelect(IntFlag):
     NONE = 0

@@ -101,8 +113,11 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: InfrahubServices)
     await diff_coordinator.update_branch_diff(base_branch=destination_branch, diff_branch=source_branch)

     diff_summary = await service.client.get_diff_summary(branch=message.source_branch)
-    …
-    …
+    await set_diff_summary_cache(pipeline_id=message.pipeline_id, diff_summary=diff_summary, cache=service.cache)
+    branch_diff = ProposedChangeBranchDiff(repositories=repositories, pipeline_id=message.pipeline_id)
+    await _populate_subscribers(
+        branch_diff=branch_diff, diff_summary=diff_summary, service=service, branch=message.source_branch
+    )

     if message.check_type is CheckType.ARTIFACT:
         events.append(

@@ -132,8 +147,8 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: InfrahubServices)
             parameters={"model": model_proposed_change_run_generator},
         )

-    if message.check_type in [CheckType.ALL, CheckType.DATA] and …
-        branch=message.source_branch
+    if message.check_type in [CheckType.ALL, CheckType.DATA] and has_node_changes(
+        diff_summary=diff_summary, branch=message.source_branch
     ):
         model_proposed_change_data_integrity = RequestProposedChangeDataIntegrity(
             proposed_change=message.proposed_change,

@@ -162,8 +177,8 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: InfrahubServices)
             parameters={"model": model_proposed_change_repo_checks},
         )

-    if message.check_type in [CheckType.ALL, CheckType.SCHEMA] and …
-        branch=message.source_branch
+    if message.check_type in [CheckType.ALL, CheckType.SCHEMA] and has_data_changes(
+        diff_summary=diff_summary, branch=message.source_branch
     ):
         await service.workflow.submit_workflow(
             workflow=REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,

@@ -215,6 +230,9 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifacts)
         definitions=definition_information[InfrahubKind.ARTIFACTDEFINITION]["edges"]
     )

+    diff_summary = await get_diff_summary_cache(pipeline_id=message.branch_diff.pipeline_id, cache=service.cache)
+    modified_kinds = get_modified_kinds(diff_summary=diff_summary, branch=message.source_branch)
+
     for artifact_definition in artifact_definitions:
         # Request artifact definition checks if the source branch that is managed in combination
         # to the Git repository containing modifications which could indicate changes to the transforms

@@ -229,7 +247,7 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifacts)
             condition=message.source_branch_sync_with_git and message.branch_diff.has_file_modifications,
         )

-        for changed_model in …
+        for changed_model in modified_kinds:
             condition = False
             if (changed_model in artifact_definition.query_models) or (
                 changed_model.startswith("Profile")

@@ -589,11 +607,13 @@ async def _gather_repository_repository_diffs(
         repo.files_changed = files_changed


-async def _populate_subscribers(…
+async def _populate_subscribers(
+    branch_diff: ProposedChangeBranchDiff, diff_summary: list[NodeDiff], service: InfrahubServices, branch: str
+) -> None:
     result = await service.client.execute_graphql(
         query=GATHER_GRAPHQL_QUERY_SUBSCRIBERS,
         branch_name=branch,
-        variables={"members": …
+        variables={"members": get_modified_node_ids(diff_summary=diff_summary, branch=branch)},
     )

     for group in result[InfrahubKind.GRAPHQLQUERYGROUP]["edges"]:
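The reworked pipeline serializes the diff summary once and parks it in the cache under a key derived from the pipeline ID; later tasks re-hydrate it instead of shipping the full list through every message. A sketch of the round trip against an in-memory stand-in for `InfrahubCache` (the key format is taken from `branch_diff.py` below; the sample `NodeDiff` entry is illustrative):

```python
import asyncio
import json
import uuid


class InMemoryCache:
    """Stand-in exposing the get/set surface used by the cache helpers."""

    def __init__(self) -> None:
        self._store: dict[str, str] = {}

    async def set(self, key: str, value: str, expires=None) -> None:
        self._store[key] = value  # TTL ignored in this sketch

    async def get(self, key: str) -> str | None:
        return self._store.get(key)


async def demo() -> None:
    cache = InMemoryCache()
    pipeline_id = uuid.uuid4()
    diff_summary = [{"branch": "add-spine", "kind": "InfraDevice", "id": "abc123"}]
    key = f"proposed_change:pipeline:pipeline_id:{pipeline_id}:diff_summary"
    await cache.set(key=key, value=json.dumps(diff_summary))
    restored = json.loads(await cache.get(key=key))
    assert restored == diff_summary


asyncio.run(demo())
```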
infrahub/message_bus/types.py CHANGED

@@ -1,9 +1,9 @@
 from __future__ import annotations

 import re
+import uuid  # noqa: TC003
 from enum import Enum

-from infrahub_sdk.diff import NodeDiff  # noqa: TC002
 from pydantic import BaseModel, Field

 from infrahub.core.constants import InfrahubKind, RepositoryInternalStatus

@@ -109,9 +109,9 @@ class ProposedChangeArtifactDefinition(BaseModel):


 class ProposedChangeBranchDiff(BaseModel):
-    diff_summary: list[NodeDiff] = Field(default_factory=list, description="The DiffSummary between two branches")
     repositories: list[ProposedChangeRepository] = Field(default_factory=list)
     subscribers: list[ProposedChangeSubscriber] = Field(default_factory=list)
+    pipeline_id: uuid.UUID = Field(..., description="The unique ID of the execution of this pipeline")

     def get_repository(self, repository_id: str) -> ProposedChangeRepository:
         for repository in self.repositories:

@@ -122,39 +122,7 @@ class ProposedChangeBranchDiff(BaseModel):
     def get_subscribers_ids(self, kind: str) -> list[str]:
         return [subscriber.subscriber_id for subscriber in self.subscribers if subscriber.kind == kind]

-    def has_node_changes(self, branch: str) -> bool:
-        """Indicates if there is at least one node object that has been modified in the branch"""
-        return bool(
-            [
-                entry
-                for entry in self.diff_summary
-                if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-            ]
-        )
-
-    def has_data_changes(self, branch: str) -> bool:
-        """Indicates if there are node or schema changes within the branch."""
-        return bool([entry for entry in self.diff_summary if entry["branch"] == branch])
-
     @property
     def has_file_modifications(self) -> bool:
         """Indicates modifications to any of the files in the Git repositories."""
         return any(repository.has_modifications for repository in self.repositories)
-
-    def modified_nodes(self, branch: str) -> list[str]:
-        """Return a list of non schema nodes that have been modified on the branch"""
-        return [
-            entry["id"]
-            for entry in self.diff_summary
-            if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-        ]
-
-    def modified_kinds(self, branch: str) -> list[str]:
-        """Return a list of non schema kinds that have been modified on the branch"""
-        return list(
-            {
-                entry["kind"]
-                for entry in self.diff_summary
-                if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-            }
-        )
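`ProposedChangeBranchDiff` thus shrinks from a carrier of the full `diff_summary` list to a small reference object: a required `pipeline_id` plus repository and subscriber metadata. A hedged sketch of the slimmed-down shape, with element types simplified to strings (the real model uses `ProposedChangeRepository` and `ProposedChangeSubscriber`, and pydantic v2 is assumed):

```python
import uuid

from pydantic import BaseModel, Field


class BranchDiffPayload(BaseModel):
    # diff_summary is gone; consumers fetch it from the cache by pipeline_id,
    # keeping this message-bus payload small and stable in size.
    pipeline_id: uuid.UUID = Field(..., description="The unique ID of the execution of this pipeline")
    repositories: list[str] = Field(default_factory=list)
    subscribers: list[str] = Field(default_factory=list)


payload = BranchDiffPayload(pipeline_id=uuid.uuid4(), repositories=["repo-1"])
print(payload.model_dump_json())
```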
infrahub/proposed_change/branch_diff.py ADDED

@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import json
+import re
+from typing import TYPE_CHECKING, cast
+
+from infrahub.exceptions import ResourceNotFoundError
+from infrahub.message_bus.types import KVTTL
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from infrahub_sdk.diff import NodeDiff
+
+    from infrahub.services.adapters.cache import InfrahubCache
+
+SCHEMA_CHANGE = re.compile(r"^Schema[A-Z]")
+
+
+def has_data_changes(diff_summary: list[NodeDiff], branch: str) -> bool:
+    """Indicates if there are node or schema changes within the branch."""
+    return any(entry["branch"] == branch for entry in diff_summary)
+
+
+def has_node_changes(diff_summary: list[NodeDiff], branch: str) -> bool:
+    """Indicates if there is at least one node object that has been modified in the branch"""
+    return any(entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"]) for entry in diff_summary)
+
+
+def get_modified_kinds(diff_summary: list[NodeDiff], branch: str) -> list[str]:
+    """Return a list of non schema kinds that have been modified on the branch"""
+    return list(
+        {
+            entry["kind"]
+            for entry in diff_summary
+            if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
+        }
+    )
+
+
+def get_modified_node_ids(diff_summary: list[NodeDiff], branch: str) -> list[str]:
+    """Return a list of non schema nodes that have been modified on the branch"""
+    return [
+        entry["id"] for entry in diff_summary if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
+    ]
+
+
+async def set_diff_summary_cache(pipeline_id: UUID, diff_summary: list[NodeDiff], cache: InfrahubCache) -> None:
+    serialized = json.dumps(diff_summary)
+    await cache.set(
+        key=f"proposed_change:pipeline:pipeline_id:{pipeline_id}:diff_summary",
+        value=serialized,
+        expires=KVTTL.TWO_HOURS,
+    )
+
+
+async def get_diff_summary_cache(pipeline_id: UUID, cache: InfrahubCache) -> list[NodeDiff]:
+    summary_payload = await cache.get(
+        key=f"proposed_change:pipeline:pipeline_id:{pipeline_id}:diff_summary",
+    )
+
+    if not summary_payload:
+        raise ResourceNotFoundError(message=f"Diff summary for pipeline {pipeline_id} was not found in the cache")
+
+    return cast(list["NodeDiff"], json.loads(summary_payload))
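The helpers all filter on the same two conditions: the entry belongs to the branch, and its kind does not match `SCHEMA_CHANGE` (`^Schema[A-Z]`). A quick demonstration with illustrative `NodeDiff`-shaped dicts (the real TypedDict carries more fields):

```python
import re

SCHEMA_CHANGE = re.compile(r"^Schema[A-Z]")

diff_summary = [
    {"branch": "add-spine", "kind": "InfraDevice", "id": "n1"},
    {"branch": "add-spine", "kind": "SchemaAttribute", "id": "n2"},  # schema change, filtered out
    {"branch": "other-branch", "kind": "InfraInterface", "id": "n3"},  # wrong branch
]

# Same predicates as has_node_changes / get_modified_kinds above:
node_changes = any(
    e["branch"] == "add-spine" and not SCHEMA_CHANGE.match(e["kind"]) for e in diff_summary
)
modified_kinds = {
    e["kind"] for e in diff_summary if e["branch"] == "add-spine" and not SCHEMA_CHANGE.match(e["kind"])
}
print(node_changes, modified_kinds)  # True {'InfraDevice'}
```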
infrahub/proposed_change/tasks.py CHANGED

@@ -61,7 +61,10 @@ from infrahub.workflows.catalogue import (
 )
 from infrahub.workflows.utils import add_tags

+from .branch_diff import get_diff_summary_cache, get_modified_kinds
+
 if TYPE_CHECKING:
+    from infrahub_sdk.diff import NodeDiff
     from infrahub_sdk.node import InfrahubNode

     from infrahub.core.models import SchemaUpdateConstraintInfo

@@ -253,6 +256,9 @@ async def run_generators(
         for generator in generators
     ]

+    diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id, cache=service.cache)
+    modified_kinds = get_modified_kinds(diff_summary=diff_summary, branch=model.source_branch)
+
     for generator_definition in generator_definitions:
         # Request generator definitions if the source branch that is managed in combination
         # to the Git repository containing modifications which could indicate changes to the transforms

@@ -267,7 +273,7 @@ async def run_generators(
             condition=model.source_branch_sync_with_git and model.branch_diff.has_file_modifications,
         )

-        for changed_model in …
+        for changed_model in modified_kinds:
             select = select.add_flag(
                 current=select,
                 flag=DefinitionSelect.MODIFIED_KINDS,

@@ -338,8 +344,9 @@ async def run_proposed_change_schema_integrity_check(
     schema_diff = dest_schema.diff(other=candidate_schema)
     validation_result = dest_schema.validate_update(other=candidate_schema, diff=schema_diff)

+    diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id, cache=service.cache)
     constraints_from_data_diff = await _get_proposed_change_schema_integrity_constraints(
-        …
+        schema=candidate_schema, diff_summary=diff_summary
     )
     constraints_from_schema_diff = validation_result.constraints
     constraints = set(constraints_from_data_diff + constraints_from_schema_diff)

@@ -390,10 +397,11 @@ async def run_proposed_change_schema_integrity_check(


 async def _get_proposed_change_schema_integrity_constraints(
-    …
+    schema: SchemaBranch, diff_summary: list[NodeDiff]
 ) -> list[SchemaUpdateConstraintInfo]:
     node_diff_field_summary_map: dict[str, NodeDiffFieldSummary] = {}
-    …
+
+    for node_diff in diff_summary:
         node_kind = node_diff["kind"]
         if node_kind not in node_diff_field_summary_map:
             node_diff_field_summary_map[node_kind] = NodeDiffFieldSummary(kind=node_kind)
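`run_generators` keeps using the `DefinitionSelect` `IntFlag` to accumulate reasons for triggering a definition; only the source of `modified_kinds` changed. For reference, this is how `IntFlag` accumulation behaves. The codebase wraps it in an `add_flag` helper that applies a condition; `FILE_CHANGES` below is an assumed second member for illustration, since only `NONE` and `MODIFIED_KINDS` appear in this diff:

```python
from enum import IntFlag


class DefinitionSelect(IntFlag):
    NONE = 0
    FILE_CHANGES = 1    # assumed member, for illustration only
    MODIFIED_KINDS = 2


select = DefinitionSelect.NONE
for changed_model in ["InfraDevice"]:  # any matching modified kind flips the flag
    select |= DefinitionSelect.MODIFIED_KINDS

print(DefinitionSelect.MODIFIED_KINDS in select)  # True
print(DefinitionSelect.FILE_CHANGES in select)    # False
```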
infrahub/server.py CHANGED

@@ -33,10 +33,8 @@ from infrahub.lock import initialize_lock
 from infrahub.log import clear_log_context, get_logger, set_log_data
 from infrahub.middleware import InfrahubCORSMiddleware
 from infrahub.services import InfrahubServices
-from infrahub.services.adapters.cache.nats import NATSCache
-from infrahub.services.adapters.cache.redis import RedisCache
-from infrahub.services.adapters.message_bus.nats import NATSMessageBus
-from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus
+from infrahub.services.adapters.cache import InfrahubCache
+from infrahub.services.adapters.message_bus import InfrahubMessageBus
 from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
 from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
 from infrahub.trace import add_span_exception, configure_trace, get_traceid

@@ -70,14 +68,11 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True
         else WorkflowLocalExecution()
     )
     component_type = ComponentType.API_SERVER
-    message_bus = config.OVERRIDE.message_bus or (
-        await NATSMessageBus.new(component_type=component_type)
-        if config.SETTINGS.broker.driver == config.BrokerDriver.NATS
-        else await RabbitMQMessageBus.new(component_type=component_type)
-    )
-    cache = config.OVERRIDE.cache or (
-        await NATSCache.new() if config.SETTINGS.cache.driver == config.CacheDriver.NATS else RedisCache()
+    message_bus = config.OVERRIDE.message_bus or await InfrahubMessageBus.new_from_driver(
+        component_type=component_type, driver=config.SETTINGS.broker.driver
     )
+
+    cache = config.OVERRIDE.cache or (await InfrahubCache.new_from_driver(driver=config.SETTINGS.cache.driver))
     service = await InfrahubServices.new(
         cache=cache,
         database=database,
infrahub/services/adapters/cache/__init__.py CHANGED

@@ -1,9 +1,11 @@
 from __future__ import annotations

+import importlib
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING

 if TYPE_CHECKING:
+    from infrahub.config import CacheDriver
     from infrahub.message_bus.types import KVTTL


@@ -34,3 +36,18 @@ class InfrahubCache(ABC):
     async def set(self, key: str, value: str, expires: KVTTL | None = None, not_exists: bool = False) -> bool | None:
         """Set a value in the cache."""
         raise NotImplementedError()
+
+    @classmethod
+    async def new_from_driver(cls, driver: CacheDriver) -> InfrahubCache:
+        """Imports and initializes the correct class based on the supplied driver.
+
+        This is to ensure that we only import the Python modules that we actually
+        need to operate and not import all possible options.
+        """
+        module = importlib.import_module(driver.driver_module_path)
+        broker_driver: InfrahubCache = getattr(module, driver.driver_class_name)
+        return await broker_driver.new()
+
+    @classmethod
+    async def new(cls) -> InfrahubCache:
+        raise NotImplementedError()
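`new_from_driver` assumes the driver enum exposes `driver_module_path` and `driver_class_name` properties (added to `infrahub/config.py` in this release, +32 lines, not shown here). The point is lazy loading: only the selected backend's module is ever imported. A runnable sketch of the same pattern, using a stdlib class as a stand-in driver target:

```python
import importlib
from enum import Enum


class CacheDriver(str, Enum):
    # Stand-in member pointing at a stdlib module/class so the sketch runs
    # anywhere; Infrahub's enum members would point at its adapter modules.
    memory = "memory"

    @property
    def driver_module_path(self) -> str:
        return {"memory": "collections"}[self.value]

    @property
    def driver_class_name(self) -> str:
        return {"memory": "OrderedDict"}[self.value]


def new_from_driver(driver: CacheDriver):
    # Only the selected driver's module is imported at this point.
    module = importlib.import_module(driver.driver_module_path)
    return getattr(module, driver.driver_class_name)()


cache = new_from_driver(CacheDriver.memory)
cache["key"] = "value"
print(type(cache).__name__, cache["key"])  # OrderedDict value
```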
infrahub/services/adapters/cache/redis.py CHANGED

@@ -1,9 +1,15 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 import redis.asyncio as redis

 from infrahub import config
-from infrahub.message_bus.types import KVTTL
 from infrahub.services.adapters.cache import InfrahubCache

+if TYPE_CHECKING:
+    from infrahub.message_bus.types import KVTTL
+

 class RedisCache(InfrahubCache):
     def __init__(self) -> None:

@@ -44,3 +50,7 @@ class RedisCache(InfrahubCache):

     async def set(self, key: str, value: str, expires: KVTTL | None = None, not_exists: bool = False) -> bool | None:
         return await self.connection.set(name=key, value=value, ex=expires.value if expires else None, nx=not_exists)
+
+    @classmethod
+    async def new(cls) -> RedisCache:
+        return cls()
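Moving the `KVTTL` import under `if TYPE_CHECKING:` (combined with `from __future__ import annotations`) keeps the name available to type checkers while skipping the runtime import, which avoids import cycles and startup cost. The mechanics, shown with a stdlib type:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only type checkers evaluate this block; at runtime the module is never
    # imported. The __future__ import keeps the annotation below a string,
    # so the missing runtime name never gets looked up.
    from decimal import Decimal


def format_amount(value: Decimal) -> str:
    return f"{value:.2f}"


# At runtime, any object supporting the format spec works:
print(format_amount(3.14159))  # 3.14
```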
infrahub/services/adapters/message_bus/__init__.py CHANGED

@@ -1,5 +1,6 @@
 from __future__ import annotations

+import importlib
 from abc import ABC, abstractmethod
 from typing import TYPE_CHECKING, TypeVar

@@ -8,6 +9,8 @@ from infrahub.message_bus.messages import ROUTING_KEY_MAP
 ResponseClass = TypeVar("ResponseClass")

 if TYPE_CHECKING:
+    from infrahub.components import ComponentType
+    from infrahub.config import BrokerDriver, BrokerSettings
     from infrahub.message_bus import InfrahubMessage, InfrahubResponse
     from infrahub.message_bus.types import MessageTTL
     from infrahub.services import InfrahubServices

@@ -34,6 +37,23 @@ class InfrahubMessageBus(ABC):
     async def shutdown(self) -> None:  # noqa: B027 We want a default empty behavior, so it's ok to have an empty non-abstract method.
         """Shutdown the Message bus"""

+    @classmethod
+    async def new(cls, component_type: ComponentType, settings: BrokerSettings | None = None) -> InfrahubMessageBus:
+        raise NotImplementedError()
+
+    @classmethod
+    async def new_from_driver(
+        cls, component_type: ComponentType, driver: BrokerDriver, settings: BrokerSettings | None = None
+    ) -> InfrahubMessageBus:
+        """Imports and initializes the correct class based on the supplied driver.
+
+        This is to ensure that we only import the Python modules that we actually
+        need to operate and not import all possible options.
+        """
+        module = importlib.import_module(driver.driver_module_path)
+        broker_driver: InfrahubMessageBus = getattr(module, driver.driver_class_name)
+        return await broker_driver.new(component_type=component_type, settings=settings)
+
     @abstractmethod
     async def publish(
         self, message: InfrahubMessage, routing_key: str, delay: MessageTTL | None = None, is_retry: bool = False
infrahub/services/adapters/workflow/worker.py CHANGED

@@ -66,7 +66,7 @@ class WorkflowWorkerExecution(InfrahubWorkflow):
         if response.state.type == StateType.CRASHED:
             raise RuntimeError(response.state.message)

-        return await response.state.result(raise_on_failure=True…
+        return await response.state.result(raise_on_failure=True)

     async def submit_workflow(
         self,
infrahub/services/component.py CHANGED

@@ -10,7 +10,6 @@ from infrahub.core.constants import GLOBAL_BRANCH_NAME
 from infrahub.core.registry import registry
 from infrahub.core.timestamp import Timestamp
 from infrahub.log import get_logger
-from infrahub.message_bus import messages
 from infrahub.message_bus.types import KVTTL
 from infrahub.worker import WORKER_IDENTITY

@@ -116,7 +115,7 @@ class InfrahubComponent:
             key=PRIMARY_API_SERVER, value=WORKER_IDENTITY, expires=KVTTL.FIFTEEN, not_exists=True
         )
         if result:
-            …
+            log.info("api_worker promoted to primary", worker_id=WORKER_IDENTITY)
         else:
             log.debug("Primary node already set")
         primary_id = await self.cache.get(key=PRIMARY_API_SERVER)
infrahub/tasks/registry.py CHANGED

@@ -22,7 +22,6 @@ async def refresh_branches(db: InfrahubDatabase) -> None:

     async with lock.registry.local_schema_lock():
         branches = await registry.branch_object.get_list(db=db)
-        active_branches = [branch.name for branch in branches]
         for new_branch in branches:
             if new_branch.name in registry.branch:
                 branch_registry: Branch = registry.branch[new_branch.name]

@@ -61,9 +60,6 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
                 include_types=True,
             )

-        …
-        …
-        …
-        log.info(
-            f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY
-        )
+        purged_branches = await registry.purge_inactive_branches(db=db, active_branches=branches)
+        for branch_name in purged_branches:
+            log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)
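The inline purge loop moves behind `registry.purge_inactive_branches`, which lives in `infrahub/core/registry.py` (+18 lines, not part of this excerpt). The following is therefore only a guess at its shape: drop registry entries whose branch no longer exists and report what was removed, so the caller can log each purge.

```python
# Hypothetical sketch; the real method also takes a db handle and receives
# Branch objects rather than bare names.
def purge_inactive_branches(registry_branches: dict[str, object], active_branches: list[str]) -> list[str]:
    purged = [name for name in registry_branches if name not in active_branches]
    for name in purged:
        del registry_branches[name]
    return purged


branches = {"main": object(), "stale-feature": object()}
print(purge_inactive_branches(branches, active_branches=["main"]))  # ['stale-feature']
```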
infrahub/workers/infrahub_async.py CHANGED

@@ -24,11 +24,7 @@ from infrahub.git import initialize_repositories_directory
 from infrahub.lock import initialize_lock
 from infrahub.services import InfrahubServices
 from infrahub.services.adapters.cache import InfrahubCache
-from infrahub.services.adapters.cache.nats import NATSCache
-from infrahub.services.adapters.cache.redis import RedisCache
 from infrahub.services.adapters.message_bus import InfrahubMessageBus
-from infrahub.services.adapters.message_bus.nats import NATSMessageBus
-from infrahub.services.adapters.message_bus.rabbitmq import RabbitMQMessageBus
 from infrahub.services.adapters.workflow import InfrahubWorkflow
 from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
 from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution

@@ -198,15 +194,13 @@ class InfrahubWorkerAsync(BaseWorker):

     async def _init_message_bus(self, component_type: ComponentType) -> InfrahubMessageBus:
         return config.OVERRIDE.message_bus or (
-            await NATSMessageBus.new(component_type=component_type)
-            if config.SETTINGS.broker.driver == config.BrokerDriver.NATS
-            else await RabbitMQMessageBus.new(component_type=component_type)
+            await InfrahubMessageBus.new_from_driver(
+                component_type=component_type, driver=config.SETTINGS.broker.driver
+            )
         )

     async def _init_cache(self) -> InfrahubCache:
-        return config.OVERRIDE.cache or (
-            await NATSCache.new() if config.SETTINGS.cache.driver == config.CacheDriver.NATS else RedisCache()
-        )
+        return config.OVERRIDE.cache or (await InfrahubCache.new_from_driver(driver=config.SETTINGS.cache.driver))

     async def _init_services(self, client: InfrahubClient) -> None:
         component_type = ComponentType.GIT_AGENT
infrahub/workflows/catalogue.py CHANGED

@@ -190,6 +190,15 @@ BRANCH_MERGE = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )

+BRANCH_MERGE_POST_PROCESS = WorkflowDefinition(
+    name="branch-merge-post-process",
+    type=WorkflowType.CORE,
+    module="infrahub.core.branch.tasks",
+    function="post_process_branch_merge",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+
 BRANCH_MERGE_MUTATION = WorkflowDefinition(
     name="merge-branch-mutation",
     type=WorkflowType.CORE,

@@ -431,6 +440,7 @@ workflows = [
     BRANCH_DELETE,
     BRANCH_MERGE,
     BRANCH_MERGE_MUTATION,
+    BRANCH_MERGE_POST_PROCESS,
     BRANCH_REBASE,
     BRANCH_VALIDATE,
     COMPUTED_ATTRIBUTE_PROCESS_JINJA2,