infrahub-server 1.2.4__py3-none-any.whl → 1.2.6__py3-none-any.whl
This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- infrahub/cli/db.py +308 -2
- infrahub/core/branch/tasks.py +50 -10
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/manager.py +15 -2
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m026_0000_prefix_fix.py +54 -0
- infrahub/core/node/__init__.py +4 -1
- infrahub/core/node/resource_manager/number_pool.py +1 -1
- infrahub/core/registry.py +2 -3
- infrahub/core/schema/manager.py +0 -1
- infrahub/core/schema/schema_branch.py +39 -40
- infrahub/core/schema/schema_branch_computed.py +12 -1
- infrahub/database/__init__.py +2 -0
- infrahub/events/branch_action.py +3 -0
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/git/integrator.py +2 -2
- infrahub/graphql/manager.py +10 -0
- infrahub/graphql/mutations/main.py +4 -5
- infrahub/graphql/mutations/resource_manager.py +3 -3
- infrahub/message_bus/messages/__init__.py +0 -2
- infrahub/message_bus/messages/request_proposedchange_pipeline.py +5 -0
- infrahub/message_bus/operations/__init__.py +0 -2
- infrahub/message_bus/operations/requests/proposed_change.py +29 -9
- infrahub/message_bus/types.py +2 -34
- infrahub/proposed_change/branch_diff.py +65 -0
- infrahub/proposed_change/tasks.py +12 -4
- infrahub/services/adapters/workflow/worker.py +1 -1
- infrahub/tasks/registry.py +4 -1
- infrahub/workflows/catalogue.py +10 -0
- infrahub_sdk/generator.py +1 -0
- infrahub_sdk/node.py +16 -4
- {infrahub_server-1.2.4.dist-info → infrahub_server-1.2.6.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.4.dist-info → infrahub_server-1.2.6.dist-info}/RECORD +42 -44
- infrahub_testcontainers/container.py +52 -2
- infrahub_testcontainers/docker-compose.test.yml +27 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +5 -5
- infrahub_testcontainers/plugin.py +1 -1
- infrahub/core/branch/flow_models.py +0 -0
- infrahub/message_bus/messages/event_branch_merge.py +0 -13
- infrahub/message_bus/operations/event/__init__.py +0 -3
- infrahub/message_bus/operations/event/branch.py +0 -61
- {infrahub_server-1.2.4.dist-info → infrahub_server-1.2.6.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.4.dist-info → infrahub_server-1.2.6.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.4.dist-info → infrahub_server-1.2.6.dist-info}/entry_points.txt +0 -0

infrahub/core/schema/schema_branch.py
CHANGED

@@ -1005,9 +1005,11 @@ class SchemaBranch:
         generic_schema = self.get_generic(name=name, duplicate=False)
         for attribute in generic_schema.attributes:
             if attribute.computed_attribute and attribute.computed_attribute.kind != ComputedAttributeKind.USER:
-
-
-
+                for inheriting_node in generic_schema.used_by:
+                    node_schema = self.get_node(name=inheriting_node, duplicate=False)
+                    self.computed_attributes.validate_generic_inheritance(
+                        node=node_schema, attribute=attribute, generic=generic_schema
+                    )

     def _validate_computed_attribute(self, node: NodeSchema, attribute: AttributeSchema) -> None:
         if not attribute.computed_attribute or attribute.computed_attribute.kind == ComputedAttributeKind.USER:

@@ -1907,10 +1909,8 @@ class SchemaBranch:

         self.set(name=node_name, schema=node_schema)

-    def add_relationships_to_template(self, node: NodeSchema) -> None:
+    def add_relationships_to_template(self, node: NodeSchema | GenericSchema) -> None:
         template_schema = self.get(name=self._get_object_template_kind(node_kind=node.kind), duplicate=False)
-        if template_schema.is_generic_schema:
-            return

         # Remove previous relationships to account for new ones
         template_schema.relationships = [

@@ -1952,6 +1952,7 @@ class SchemaBranch:
                     label=f"{relationship.name} template".title()
                     if relationship.kind in [RelationshipKind.COMPONENT, RelationshipKind.PARENT]
                     else relationship.name.title(),
+                    inherited=relationship.inherited,
                 )
             )

@@ -1981,9 +1982,6 @@ class SchemaBranch:
         need_template_kinds = [n.kind for n in need_templates]

         if node.is_generic_schema:
-            # When needing a template for a generic, we generate an empty shell mostly to make sure that schemas (including the GraphQL one) will
-            # look right. We don't really care about applying inheritance of fields as it was already processed and actual templates will have the
-            # correct attributes and relationships
             template = GenericSchema(
                 name=node.kind,
                 namespace="Template",

@@ -1992,43 +1990,44 @@ class SchemaBranch:
                 generate_profile=False,
                 branch=node.branch,
                 include_in_menu=False,
+                display_labels=["template_name__value"],
+                human_friendly_id=["template_name__value"],
+                uniqueness_constraints=[["template_name__value"]],
                 attributes=[template_name_attr],
             )

             for used in node.used_by:
                 if used in need_template_kinds:
                     template.used_by.append(self._get_object_template_kind(node_kind=used))
+        else:
+            template = TemplateSchema(
+                name=node.kind,
+                namespace="Template",
+                label=f"Object template {node.label}",
+                description=f"Object template for {node.kind}",
+                branch=node.branch,
+                include_in_menu=False,
+                display_labels=["template_name__value"],
+                human_friendly_id=["template_name__value"],
+                uniqueness_constraints=[["template_name__value"]],
+                inherit_from=[InfrahubKind.LINEAGESOURCE, InfrahubKind.NODE, core_template_schema.kind],
+                default_filter="template_name__value",
+                attributes=[template_name_attr],
+                relationships=[
+                    RelationshipSchema(
+                        name="related_nodes",
+                        identifier="node__objecttemplate",
+                        peer=node.kind,
+                        kind=RelationshipKind.TEMPLATE,
+                        cardinality=RelationshipCardinality.MANY,
+                        branch=BranchSupportType.AWARE,
+                    )
+                ],
+            )

-
-
-
-            name=node.kind,
-            namespace="Template",
-            label=f"Object template {node.label}",
-            description=f"Object template for {node.kind}",
-            branch=node.branch,
-            include_in_menu=False,
-            display_labels=["template_name__value"],
-            human_friendly_id=["template_name__value"],
-            uniqueness_constraints=[["template_name__value"]],
-            inherit_from=[InfrahubKind.LINEAGESOURCE, InfrahubKind.NODE, core_template_schema.kind],
-            default_filter="template_name__value",
-            attributes=[template_name_attr],
-            relationships=[
-                RelationshipSchema(
-                    name="related_nodes",
-                    identifier="node__objecttemplate",
-                    peer=node.kind,
-                    kind=RelationshipKind.TEMPLATE,
-                    cardinality=RelationshipCardinality.MANY,
-                    branch=BranchSupportType.AWARE,
-                )
-            ],
-        )
-
-        for inherited in node.inherit_from:
-            if inherited in need_template_kinds:
-                template.inherit_from.append(self._get_object_template_kind(node_kind=inherited))
+            for inherited in node.inherit_from:
+                if inherited in need_template_kinds:
+                    template.inherit_from.append(self._get_object_template_kind(node_kind=inherited))

         for node_attr in node.attributes:
             if node_attr.unique or node_attr.read_only:

@@ -2036,7 +2035,7 @@ class SchemaBranch:

             attr = AttributeSchema(
                 optional=node_attr.optional if is_autogenerated_subtemplate else True,
-                **node_attr.model_dump(exclude=["id", "unique", "optional", "read_only"
+                **node_attr.model_dump(exclude=["id", "unique", "optional", "read_only"]),
             )
             template.attributes.append(attr)

infrahub/core/schema/schema_branch_computed.py
CHANGED

@@ -9,7 +9,7 @@ from pydantic import BaseModel, Field
 from infrahub.core.schema import AttributeSchema  # noqa: TC001

 if TYPE_CHECKING:
-    from infrahub.core.schema import NodeSchema, SchemaAttributePath
+    from infrahub.core.schema import GenericSchema, NodeSchema, SchemaAttributePath


 @dataclass

@@ -90,6 +90,7 @@ class ComputedAttributes:
     ) -> None:
         self._computed_python_transform_attribute_map: dict[str, list[AttributeSchema]] = transform_attribute_map or {}
         self._computed_jinja2_attribute_map: dict[str, RegisteredNodeComputedAttribute] = jinja2_attribute_map or {}
+        self._defined_from_generic: dict[str, str] = {}

     def duplicate(self) -> ComputedAttributes:
         return self.__class__(

@@ -166,6 +167,16 @@ class ComputedAttributes:
                 schema_path.active_relationship_schema.name
             ].append(deepcopy(source_attribute))

+    def validate_generic_inheritance(
+        self, node: NodeSchema, attribute: AttributeSchema, generic: GenericSchema
+    ) -> None:
+        attribute_key = f"{node.kind}__{attribute.name}"
+        if duplicate := self._defined_from_generic.get(attribute_key):
+            raise ValueError(
+                f"{node.kind}: {attribute.name!r} is declared as a computed attribute from multiple generics {sorted([duplicate, generic.kind])}"
+            )
+        self._defined_from_generic[attribute_key] = generic.kind
+
     def get_impacted_jinja2_targets(self, kind: str, updates: list[str] | None = None) -> list[ComputedAttributeTarget]:
         if mapping := self._computed_jinja2_attribute_map.get(kind):
             return mapping.get_targets(updates=updates)

infrahub/database/__init__.py
CHANGED

@@ -394,8 +394,10 @@ class InfrahubDatabase:
             with QUERY_EXECUTION_METRICS.labels(**labels).time():
                 response = await self.run_query(query=query, params=params, name=name)
             if response is None:
+                span.set_attribute("rows", "empty")
                 return [], {}
             results = [item async for item in response]
+            span.set_attribute("rows", len(results))
             return results, response._metadata or {}

     async def run_query(
infrahub/events/branch_action.py
CHANGED

@@ -100,6 +100,9 @@ class BranchMergedEvent(InfrahubEvent):

         return related

+    def get_messages(self) -> list[InfrahubMessage]:
+        return [RefreshRegistryBranches()]
+

 class BranchRebasedEvent(InfrahubEvent):
     """Event generated when a branch has been rebased"""

infrahub/events/group_action.py
CHANGED

@@ -22,7 +22,7 @@ class GroupMutatedEvent(InfrahubEvent):
     def get_related(self) -> list[dict[str, str]]:
         related = super().get_related()

-        if self.kind
+        if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
             # Temporary workaround to avoid too large payloads for the related field
             return related

infrahub/events/node_action.py
CHANGED

@@ -24,7 +24,7 @@ class NodeMutatedEvent(InfrahubEvent):

     def get_related(self) -> list[dict[str, str]]:
         related = super().get_related()
-        if self.kind
+        if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
             # Temporary workaround to avoid too large payloads for the related field
             return related

infrahub/git/integrator.py
CHANGED

@@ -954,7 +954,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             source=self.id,
             is_protected=True,
         )
-        obj = await self.sdk.create(kind=CoreCheckDefinition, branch=branch_name,
+        obj = await self.sdk.create(kind=CoreCheckDefinition, branch=branch_name, data=create_payload)
         await obj.save()

         return obj

@@ -1012,7 +1012,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             source=str(self.id),
             is_protected=True,
         )
-        obj = await self.sdk.create(kind=CoreTransformPython, branch=branch_name,
+        obj = await self.sdk.create(kind=CoreTransformPython, branch=branch_name, data=create_payload)
         await obj.save()
         return obj

infrahub/graphql/manager.py
CHANGED

@@ -113,6 +113,16 @@ class GraphQLSchemaManager:
     def clear_cache(cls) -> None:
        cls._branch_details_by_name = {}

+    @classmethod
+    def purge_inactive(cls, active_branches: list[str]) -> set[str]:
+        """Return inactive branches that were purged"""
+        inactive_branches: set[str] = set()
+        for branch_name in list(cls._branch_details_by_name):
+            if branch_name not in active_branches:
+                inactive_branches.add(branch_name)
+                del cls._branch_details_by_name[branch_name]
+        return inactive_branches
+
     @classmethod
     def _cache_branch(
         cls, branch: Branch, schema_branch: SchemaBranch, schema_hash: str | None = None
infrahub/graphql/mutations/main.py
CHANGED

@@ -489,7 +489,8 @@ class InfrahubMutationMixin:
         we would update the node without rerunning uniqueness constraint.
         """

-
+        schema = cls._meta.active_schema
+        schema_name = schema.kind

         graphql_context: GraphqlContext = info.context
         db = database or graphql_context.db

@@ -509,10 +510,8 @@ class InfrahubMutationMixin:
             )
             return updated_obj, mutation, False

-        if
-            node = await node_getter_default_filter.get_node(
-                node_schema=cls._meta.active_schema, data=data, branch=branch
-            )
+        if not schema.human_friendly_id and schema.default_filter is not None:
+            node = await node_getter_default_filter.get_node(node_schema=schema, data=data, branch=branch)

         if "hfid" in data:
             node = await NodeManager.get_one_by_hfid(db=db, hfid=dict_data["hfid"], kind=schema_name, branch=branch)

infrahub/graphql/mutations/resource_manager.py
CHANGED

@@ -177,9 +177,9 @@ class InfrahubNumberPoolMutation(InfrahubMutationMixin, Mutation):
         database: InfrahubDatabase | None = None,  # noqa: ARG003
     ) -> Any:
         try:
-            pool_node = registry.
-
-
+            pool_node = registry.schema.get(name=data["node"].value)
+            if not pool_node.is_generic_schema and not pool_node.is_node_schema:
+                raise ValidationError(input_value="The selected model is not a Node or a Generic")
         except SchemaNotFoundError as exc:
             exc.message = "The selected model does not exist"
             raise exc

infrahub/message_bus/messages/__init__.py
CHANGED

@@ -1,7 +1,6 @@
 from infrahub.message_bus import InfrahubMessage, InfrahubResponse

 from .check_generator_run import CheckGeneratorRun
-from .event_branch_merge import EventBranchMerge
 from .finalize_validator_execution import FinalizeValidatorExecution
 from .git_file_get import GitFileGet, GitFileGetResponse
 from .git_repository_connectivity import GitRepositoryConnectivity

@@ -15,7 +14,6 @@ from .send_echo_request import SendEchoRequest, SendEchoRequestResponse

 MESSAGE_MAP: dict[str, type[InfrahubMessage]] = {
     "check.generator.run": CheckGeneratorRun,
-    "event.branch.merge": EventBranchMerge,
     "finalize.validator.execution": FinalizeValidatorExecution,
     "git.file.get": GitFileGet,
     "git.repository.connectivity": GitRepositoryConnectivity,

infrahub/message_bus/messages/request_proposedchange_pipeline.py
CHANGED

@@ -1,3 +1,5 @@
+import uuid
+
 from pydantic import Field

 from infrahub.context import InfrahubContext

@@ -16,3 +18,6 @@ class RequestProposedChangePipeline(InfrahubMessage):
         default=CheckType.ALL, description="Can be used to restrict the pipeline to a specific type of job"
     )
     context: InfrahubContext = Field(..., description="The context of the task")
+    pipeline_id: uuid.UUID = Field(
+        default_factory=uuid.uuid4, description="The unique ID of the execution of this pipeline"
+    )

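pipeline_id uses pydantic's default_factory so every pipeline message mints its own UUID without callers having to pass one. A minimal sketch of that pattern (the model name here is illustrative):

# Illustrative sketch of the default_factory pattern used for pipeline_id.
import uuid

from pydantic import BaseModel, Field

class PipelineMessage(BaseModel):
    pipeline_id: uuid.UUID = Field(
        default_factory=uuid.uuid4, description="The unique ID of the execution of this pipeline"
    )

assert PipelineMessage().pipeline_id != PipelineMessage().pipeline_id  # a fresh UUID per instance
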
infrahub/message_bus/operations/__init__.py
CHANGED

@@ -4,7 +4,6 @@ from prefect import Flow
 from infrahub.message_bus import RPCErrorResponse, messages
 from infrahub.message_bus.operations import (
     check,
-    event,
     finalize,
     git,
     refresh,

@@ -17,7 +16,6 @@ from infrahub.tasks.check import set_check_status

 COMMAND_MAP = {
     "check.generator.run": check.generator.run,
-    "event.branch.merge": event.branch.merge,
     "finalize.validator.execution": finalize.validator.execution,
     "git.file.get": git.file.get,
     "git.repository.connectivity": git.repository.connectivity,

infrahub/message_bus/operations/requests/proposed_change.py
CHANGED

@@ -1,6 +1,7 @@
 from __future__ import annotations

 from enum import IntFlag
+from typing import TYPE_CHECKING

 from prefect import flow, task
 from prefect.logging import get_run_logger

@@ -20,6 +21,14 @@ from infrahub.message_bus.types import (
     ProposedChangeRepository,
     ProposedChangeSubscriber,
 )
+from infrahub.proposed_change.branch_diff import (
+    get_diff_summary_cache,
+    get_modified_kinds,
+    get_modified_node_ids,
+    has_data_changes,
+    has_node_changes,
+    set_diff_summary_cache,
+)
 from infrahub.proposed_change.models import (
     RequestArtifactDefinitionCheck,
     RequestProposedChangeDataIntegrity,

@@ -40,6 +49,9 @@ from infrahub.workflows.catalogue import (
 )
 from infrahub.workflows.utils import add_tags

+if TYPE_CHECKING:
+    from infrahub_sdk.diff import NodeDiff
+

 class DefinitionSelect(IntFlag):
     NONE = 0

@@ -101,8 +113,11 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: Inf
     await diff_coordinator.update_branch_diff(base_branch=destination_branch, diff_branch=source_branch)

     diff_summary = await service.client.get_diff_summary(branch=message.source_branch)
-
-
+    await set_diff_summary_cache(pipeline_id=message.pipeline_id, diff_summary=diff_summary, cache=service.cache)
+    branch_diff = ProposedChangeBranchDiff(repositories=repositories, pipeline_id=message.pipeline_id)
+    await _populate_subscribers(
+        branch_diff=branch_diff, diff_summary=diff_summary, service=service, branch=message.source_branch
+    )

     if message.check_type is CheckType.ARTIFACT:
         events.append(

@@ -132,8 +147,8 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: Inf
         parameters={"model": model_proposed_change_run_generator},
     )

-    if message.check_type in [CheckType.ALL, CheckType.DATA] and
-        branch=message.source_branch
+    if message.check_type in [CheckType.ALL, CheckType.DATA] and has_node_changes(
+        diff_summary=diff_summary, branch=message.source_branch
     ):
         model_proposed_change_data_integrity = RequestProposedChangeDataIntegrity(
             proposed_change=message.proposed_change,

@@ -162,8 +177,8 @@ async def pipeline(message: messages.RequestProposedChangePipeline, service: Inf
         parameters={"model": model_proposed_change_repo_checks},
     )

-    if message.check_type in [CheckType.ALL, CheckType.SCHEMA] and
-        branch=message.source_branch
+    if message.check_type in [CheckType.ALL, CheckType.SCHEMA] and has_data_changes(
+        diff_summary=diff_summary, branch=message.source_branch
     ):
         await service.workflow.submit_workflow(
             workflow=REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,

@@ -215,6 +230,9 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifa
         definitions=definition_information[InfrahubKind.ARTIFACTDEFINITION]["edges"]
     )

+    diff_summary = await get_diff_summary_cache(pipeline_id=message.branch_diff.pipeline_id, cache=service.cache)
+    modified_kinds = get_modified_kinds(diff_summary=diff_summary, branch=message.source_branch)
+
     for artifact_definition in artifact_definitions:
         # Request artifact definition checks if the source branch that is managed in combination
         # to the Git repository containing modifications which could indicate changes to the transforms

@@ -229,7 +247,7 @@ async def refresh_artifacts(message: messages.RequestProposedChangeRefreshArtifa
             condition=message.source_branch_sync_with_git and message.branch_diff.has_file_modifications,
         )

-        for changed_model in
+        for changed_model in modified_kinds:
             condition = False
             if (changed_model in artifact_definition.query_models) or (
                 changed_model.startswith("Profile")

@@ -589,11 +607,13 @@ async def _gather_repository_repository_diffs(
     repo.files_changed = files_changed


-async def _populate_subscribers(
+async def _populate_subscribers(
+    branch_diff: ProposedChangeBranchDiff, diff_summary: list[NodeDiff], service: InfrahubServices, branch: str
+) -> None:
     result = await service.client.execute_graphql(
         query=GATHER_GRAPHQL_QUERY_SUBSCRIBERS,
         branch_name=branch,
-        variables={"members":
+        variables={"members": get_modified_node_ids(diff_summary=diff_summary, branch=branch)},
     )

     for group in result[InfrahubKind.GRAPHQLQUERYGROUP]["edges"]:

infrahub/message_bus/types.py
CHANGED

@@ -1,9 +1,9 @@
 from __future__ import annotations

 import re
+import uuid  # noqa: TC003
 from enum import Enum

-from infrahub_sdk.diff import NodeDiff  # noqa: TC002
 from pydantic import BaseModel, Field

 from infrahub.core.constants import InfrahubKind, RepositoryInternalStatus

@@ -109,9 +109,9 @@ class ProposedChangeArtifactDefinition(BaseModel):


 class ProposedChangeBranchDiff(BaseModel):
-    diff_summary: list[NodeDiff] = Field(default_factory=list, description="The DiffSummary between two branches")
     repositories: list[ProposedChangeRepository] = Field(default_factory=list)
     subscribers: list[ProposedChangeSubscriber] = Field(default_factory=list)
+    pipeline_id: uuid.UUID = Field(..., description="The unique ID of the execution of this pipeline")

     def get_repository(self, repository_id: str) -> ProposedChangeRepository:
         for repository in self.repositories:

@@ -122,39 +122,7 @@ class ProposedChangeBranchDiff(BaseModel):
     def get_subscribers_ids(self, kind: str) -> list[str]:
         return [subscriber.subscriber_id for subscriber in self.subscribers if subscriber.kind == kind]

-    def has_node_changes(self, branch: str) -> bool:
-        """Indicates if there is at least one node object that has been modified in the branch"""
-        return bool(
-            [
-                entry
-                for entry in self.diff_summary
-                if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-            ]
-        )
-
-    def has_data_changes(self, branch: str) -> bool:
-        """Indicates if there are node or schema changes within the branch."""
-        return bool([entry for entry in self.diff_summary if entry["branch"] == branch])
-
     @property
     def has_file_modifications(self) -> bool:
         """Indicates modifications to any of the files in the Git repositories."""
         return any(repository.has_modifications for repository in self.repositories)
-
-    def modified_nodes(self, branch: str) -> list[str]:
-        """Return a list of non schema nodes that have been modified on the branch"""
-        return [
-            entry["id"]
-            for entry in self.diff_summary
-            if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-        ]
-
-    def modified_kinds(self, branch: str) -> list[str]:
-        """Return a list of non schema kinds that have been modified on the branch"""
-        return list(
-            {
-                entry["kind"]
-                for entry in self.diff_summary
-                if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
-            }
-        )

infrahub/proposed_change/branch_diff.py
ADDED

@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import json
+import re
+from typing import TYPE_CHECKING, cast
+
+from infrahub.exceptions import ResourceNotFoundError
+from infrahub.message_bus.types import KVTTL
+
+if TYPE_CHECKING:
+    from uuid import UUID
+
+    from infrahub_sdk.diff import NodeDiff
+
+    from infrahub.services.adapters.cache import InfrahubCache
+
+SCHEMA_CHANGE = re.compile(r"^Schema[A-Z]")
+
+
+def has_data_changes(diff_summary: list[NodeDiff], branch: str) -> bool:
+    """Indicates if there are node or schema changes within the branch."""
+    return any(entry["branch"] == branch for entry in diff_summary)
+
+
+def has_node_changes(diff_summary: list[NodeDiff], branch: str) -> bool:
+    """Indicates if there is at least one node object that has been modified in the branch"""
+    return any(entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"]) for entry in diff_summary)
+
+
+def get_modified_kinds(diff_summary: list[NodeDiff], branch: str) -> list[str]:
+    """Return a list of non schema kinds that have been modified on the branch"""
+    return list(
+        {
+            entry["kind"]
+            for entry in diff_summary
+            if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
+        }
+    )
+
+
+def get_modified_node_ids(diff_summary: list[NodeDiff], branch: str) -> list[str]:
+    """Return a list of non schema nodes that have been modified on the branch"""
+    return [
+        entry["id"] for entry in diff_summary if entry["branch"] == branch and not SCHEMA_CHANGE.match(entry["kind"])
+    ]
+
+
+async def set_diff_summary_cache(pipeline_id: UUID, diff_summary: list[NodeDiff], cache: InfrahubCache) -> None:
+    serialized = json.dumps(diff_summary)
+    await cache.set(
+        key=f"proposed_change:pipeline:pipeline_id:{pipeline_id}:diff_summary",
+        value=serialized,
+        expires=KVTTL.TWO_HOURS,
+    )
+
+
+async def get_diff_summary_cache(pipeline_id: UUID, cache: InfrahubCache) -> list[NodeDiff]:
+    summary_payload = await cache.get(
+        key=f"proposed_change:pipeline:pipeline_id:{pipeline_id}:diff_summary",
+    )
+
+    if not summary_payload:
+        raise ResourceNotFoundError(message=f"Diff summary for pipeline {pipeline_id} was not found in the cache")
+
+    return cast(list["NodeDiff"], json.loads(summary_payload))

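The helpers in the new branch_diff module are pure functions over the cached diff summary, so their filtering is easy to show in isolation. A small sketch with made-up entries (real NodeDiff entries carry more fields than shown here):

# Illustrative only: a made-up diff summary exercising the new helpers.
from infrahub.proposed_change.branch_diff import (
    get_modified_kinds,
    get_modified_node_ids,
    has_data_changes,
    has_node_changes,
)

diff_summary = [
    {"branch": "feature-1", "kind": "InfraDevice", "id": "1111"},
    {"branch": "feature-1", "kind": "SchemaAttribute", "id": "2222"},
    {"branch": "main", "kind": "InfraDevice", "id": "3333"},
]

has_data_changes(diff_summary, branch="feature-1")       # True: any entry on the branch counts
has_node_changes(diff_summary, branch="feature-1")       # True: InfraDevice is not a Schema* kind
get_modified_kinds(diff_summary, branch="feature-1")     # ["InfraDevice"]; Schema* kinds are filtered out
get_modified_node_ids(diff_summary, branch="feature-1")  # ["1111"]
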
infrahub/proposed_change/tasks.py
CHANGED

@@ -61,7 +61,10 @@ from infrahub.workflows.catalogue import (
 )
 from infrahub.workflows.utils import add_tags

+from .branch_diff import get_diff_summary_cache, get_modified_kinds
+
 if TYPE_CHECKING:
+    from infrahub_sdk.diff import NodeDiff
     from infrahub_sdk.node import InfrahubNode

     from infrahub.core.models import SchemaUpdateConstraintInfo

@@ -253,6 +256,9 @@ async def run_generators(
         for generator in generators
     ]

+    diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id, cache=service.cache)
+    modified_kinds = get_modified_kinds(diff_summary=diff_summary, branch=model.source_branch)
+
     for generator_definition in generator_definitions:
         # Request generator definitions if the source branch that is managed in combination
         # to the Git repository containing modifications which could indicate changes to the transforms

@@ -267,7 +273,7 @@ async def run_generators(
             condition=model.source_branch_sync_with_git and model.branch_diff.has_file_modifications,
         )

-        for changed_model in
+        for changed_model in modified_kinds:
             select = select.add_flag(
                 current=select,
                 flag=DefinitionSelect.MODIFIED_KINDS,

@@ -338,8 +344,9 @@ async def run_proposed_change_schema_integrity_check(
     schema_diff = dest_schema.diff(other=candidate_schema)
     validation_result = dest_schema.validate_update(other=candidate_schema, diff=schema_diff)

+    diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id, cache=service.cache)
     constraints_from_data_diff = await _get_proposed_change_schema_integrity_constraints(
-
+        schema=candidate_schema, diff_summary=diff_summary
     )
     constraints_from_schema_diff = validation_result.constraints
     constraints = set(constraints_from_data_diff + constraints_from_schema_diff)

@@ -390,10 +397,11 @@ async def run_proposed_change_schema_integrity_check(


 async def _get_proposed_change_schema_integrity_constraints(
-
+    schema: SchemaBranch, diff_summary: list[NodeDiff]
 ) -> list[SchemaUpdateConstraintInfo]:
     node_diff_field_summary_map: dict[str, NodeDiffFieldSummary] = {}
-
+
+    for node_diff in diff_summary:
         node_kind = node_diff["kind"]
         if node_kind not in node_diff_field_summary_map:
             node_diff_field_summary_map[node_kind] = NodeDiffFieldSummary(kind=node_kind)

infrahub/services/adapters/workflow/worker.py
CHANGED

@@ -66,7 +66,7 @@ class WorkflowWorkerExecution(InfrahubWorkflow):
         if response.state.type == StateType.CRASHED:
             raise RuntimeError(response.state.message)

-        return await response.state.result(raise_on_failure=True
+        return await response.state.result(raise_on_failure=True)

     async def submit_workflow(
         self,

infrahub/tasks/registry.py
CHANGED

@@ -61,5 +61,8 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
         )

     purged_branches = await registry.purge_inactive_branches(db=db, active_branches=branches)
-
+    purged_branches.update(
+        GraphQLSchemaManager.purge_inactive(active_branches=[branch.name for branch in branches])
+    )
+    for branch_name in sorted(purged_branches):
         log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)

infrahub/workflows/catalogue.py
CHANGED

@@ -190,6 +190,15 @@ BRANCH_MERGE = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )

+BRANCH_MERGE_POST_PROCESS = WorkflowDefinition(
+    name="branch-merge-post-process",
+    type=WorkflowType.CORE,
+    module="infrahub.core.branch.tasks",
+    function="post_process_branch_merge",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+
 BRANCH_MERGE_MUTATION = WorkflowDefinition(
     name="merge-branch-mutation",
     type=WorkflowType.CORE,

@@ -431,6 +440,7 @@ workflows = [
     BRANCH_DELETE,
     BRANCH_MERGE,
     BRANCH_MERGE_MUTATION,
+    BRANCH_MERGE_POST_PROCESS,
     BRANCH_REBASE,
     BRANCH_VALIDATE,
     COMPUTED_ATTRIBUTE_PROCESS_JINJA2,