infrahub-server 1.5.0b0-py3-none-any.whl → 1.5.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/cli/db.py +24 -0
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/core/attribute.py +3 -3
- infrahub/core/branch/tasks.py +2 -1
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/graph/__init__.py +4 -0
- infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
- infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
- infrahub/core/migrations/schema/node_attribute_add.py +5 -2
- infrahub/core/migrations/shared.py +5 -6
- infrahub/core/node/__init__.py +142 -40
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/node.py +14 -1
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +14 -1
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +134 -0
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +186 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +31 -15
- infrahub/git/integrator.py +22 -14
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/mutations/display_label.py +111 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +118 -0
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +185 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +15 -4
- infrahub/middleware.py +26 -1
- infrahub/proposed_change/tasks.py +10 -1
- infrahub/server.py +16 -3
- infrahub/services/__init__.py +8 -5
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/tasks.py +3 -0
- infrahub/workflows/catalogue.py +72 -0
- infrahub/workflows/initialization.py +16 -0
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +6 -2
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +38 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/object.py +84 -10
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +5 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +104 -79
- infrahub_testcontainers/container.py +1 -1
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub/graphql/schema.py
CHANGED

@@ -19,7 +19,9 @@ from .mutations.computed_attribute import RecomputeComputedAttribute, UpdateComp
 from .mutations.convert_object_type import ConvertObjectType
 from .mutations.diff import DiffUpdateMutation
 from .mutations.diff_conflict import ResolveDiffConflict
+from .mutations.display_label import UpdateDisplayLabel
 from .mutations.generator import GeneratorDefinitionRequestRun
+from .mutations.hfid import UpdateHFID
 from .mutations.profile import InfrahubProfilesRefresh
 from .mutations.proposed_change import (
     ProposedChangeCheckForApprovalRevoke,
@@ -114,6 +116,8 @@ class InfrahubBaseMutation(ObjectType):
     InfrahubRepositoryProcess = ProcessRepository.Field()
     InfrahubRepositoryConnectivity = ValidateRepositoryConnectivity.Field()
     InfrahubUpdateComputedAttribute = UpdateComputedAttribute.Field()
+    InfrahubUpdateDisplayLabel = UpdateDisplayLabel.Field()
+    InfrahubUpdateHFID = UpdateHFID.Field()
     InfrahubRecomputeComputedAttribute = RecomputeComputedAttribute.Field()

     RelationshipAdd = RelationshipAdd.Field()
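Both new mutation fields are registered on InfrahubBaseMutation, so any GraphQL client can call them once the server is upgraded. A minimal sketch of invoking the new InfrahubUpdateHFID field; the mutation shape mirrors the UPDATE_HFID document in infrahub/hfid/tasks.py below, while the node id, kind, and value here are placeholders:

from infrahub_sdk import InfrahubClient

UPDATE_HFID = """
mutation UpdateHFID($id: String!, $kind: String!, $value: [String!]!) {
  InfrahubUpdateHFID(data: {id: $id, value: $value, kind: $kind}) {
    ok
  }
}
"""

async def update_hfid_example(client: InfrahubClient) -> bool:
    # Placeholder node id and value; "LocationSite" is a hypothetical kind.
    response = await client.execute_graphql(
        query=UPDATE_HFID,
        variables={"id": "node-uuid", "kind": "LocationSite", "value": ["atl1"]},
        branch_name="main",
    )
    return response["InfrahubUpdateHFID"]["ok"]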
infrahub/groups/tasks.py
CHANGED

@@ -20,7 +20,7 @@ async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate) -> N
     if len(model.subscribers) == 1:
         related_nodes.append(model.subscribers[0])

-    await add_tags(branches=[model.branch], nodes=related_nodes)
+    await add_tags(branches=[model.branch], nodes=related_nodes, namespace=False)

     params_hash = dict_hash(model.params)
     group_name = f"{model.query_name}__{params_hash}"
infrahub/hfid/__init__.py
ADDED (file without changes)
infrahub/hfid/gather.py
ADDED

@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+from prefect import task
+from prefect.cache_policies import NONE
+from prefect.logging import get_run_logger
+
+from infrahub.core.registry import registry
+from infrahub.database import InfrahubDatabase  # noqa: TC001 needed for prefect flow
+
+from .models import HFIDTriggerDefinition
+
+
+@dataclass
+class BranchScope:
+    name: str
+    out_of_scope: list[str] = field(default_factory=list)
+
+
+@task(
+    name="gather-trigger-hfid",
+    cache_policy=NONE,
+)
+async def gather_trigger_hfid(
+    db: InfrahubDatabase | None = None,  # noqa: ARG001 Needed to have a common function signature for gathering functions
+) -> list[HFIDTriggerDefinition]:
+    log = get_run_logger()
+
+    # Build a list of all branches to process based on which branch is different from main
+    branches_with_diff_from_main = registry.get_altered_schema_branches()
+    branches_to_process: list[BranchScope] = [BranchScope(name=branch) for branch in branches_with_diff_from_main]
+    branches_to_process.append(BranchScope(name=registry.default_branch, out_of_scope=branches_with_diff_from_main))
+
+    triggers: list[HFIDTriggerDefinition] = []
+
+    for branch in branches_to_process:
+        schema_branch = registry.schema.get_schema_branch(name=branch.name)
+        branch_triggers = HFIDTriggerDefinition.from_schema_hfids(
+            branch=branch.name,
+            hfids=schema_branch.hfids,
+            branches_out_of_scope=branch.out_of_scope,
+        )
+        log.info(f"Generating {len(branch_triggers)} HFID trigger for {branch.name} (except {branch.out_of_scope})")
+
+        triggers.extend(branch_triggers)
+
+    return triggers
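The branch scoping is the interesting part here: every branch whose schema diverges from the default branch gets its own trigger set, and the default branch covers all remaining branches via out_of_scope exclusions. A self-contained sketch of that selection logic, with hypothetical branch names:

from dataclasses import dataclass, field

@dataclass
class BranchScope:
    name: str
    out_of_scope: list[str] = field(default_factory=list)

def build_branch_scopes(default_branch: str, altered_branches: list[str]) -> list[BranchScope]:
    # One scope per branch whose schema diverges from the default branch...
    scopes = [BranchScope(name=branch) for branch in altered_branches]
    # ...plus the default branch, which matches every branch except the diverged ones
    # (rendered as "!branch" matchers in HFIDTriggerDefinition.new).
    scopes.append(BranchScope(name=default_branch, out_of_scope=list(altered_branches)))
    return scopes

# Two scopes for one diverged branch:
# [BranchScope(name='add-dc3', out_of_scope=[]), BranchScope(name='main', out_of_scope=['add-dc3'])]
print(build_branch_scopes("main", ["add-dc3"]))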
infrahub/hfid/models.py
ADDED

@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Self
+
+from infrahub_sdk.graphql import Query
+from pydantic import BaseModel, Field
+
+from infrahub.core.constants import RelationshipCardinality
+from infrahub.core.registry import registry
+from infrahub.core.schema import NodeSchema  # noqa: TC001
+from infrahub.events import NodeUpdatedEvent
+from infrahub.trigger.constants import NAME_SEPARATOR
+from infrahub.trigger.models import (
+    EventTrigger,
+    ExecuteWorkflow,
+    TriggerBranchDefinition,
+    TriggerType,
+)
+from infrahub.workflows.catalogue import HFID_PROCESS
+
+if TYPE_CHECKING:
+    from infrahub.core.schema.schema_branch_hfid import HFIDs, RelationshipTriggers
+
+
+@dataclass
+class AttributeTarget:
+    hash: str
+    fields: set[str]
+
+
+class HFIDTriggerDefinition(TriggerBranchDefinition):
+    type: TriggerType = TriggerType.HUMAN_FRIENDLY_ID
+    hfid_hash: str
+    target_kind: str | None = Field(default=None)
+
+    def get_description(self) -> str:
+        return f"{super().get_description()} | hash:{self.hfid_hash}"
+
+    @classmethod
+    def from_schema_hfids(
+        cls,
+        branch: str,
+        hfids: HFIDs,
+        branches_out_of_scope: list[str] | None = None,
+    ) -> list[HFIDTriggerDefinition]:
+        """
+        This function is used to create a trigger definition for a display labels of type Jinja2.
+        """
+
+        definitions: list[HFIDTriggerDefinition] = []
+
+        for node_kind, hfid_definition in hfids.get_template_nodes().items():
+            definitions.append(
+                cls.new(
+                    branch=branch,
+                    node_kind=node_kind,
+                    target_kind=node_kind,
+                    fields=[
+                        "_trigger_placeholder"
+                    ],  # Triggers for the nodes themselves are only used to determine if all nodes should be regenerated
+                    hfid_hash=hfid_definition.get_hash(),
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        for related_kind, relationship_trigger in hfids.get_related_trigger_nodes().items():
+            definitions.extend(
+                cls.from_related_node(
+                    branch=branch,
+                    related_kind=related_kind,
+                    relationship_trigger=relationship_trigger,
+                    hfids=hfids,
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        return definitions
+
+    @classmethod
+    def from_related_node(
+        cls,
+        branch: str,
+        related_kind: str,
+        relationship_trigger: RelationshipTriggers,
+        hfids: HFIDs,
+        branches_out_of_scope: list[str] | None = None,
+    ) -> list[HFIDTriggerDefinition]:
+        targets_by_attribute: dict[str, AttributeTarget] = {}
+        definitions: list[HFIDTriggerDefinition] = []
+        for attribute, relationship_identifiers in relationship_trigger.attributes.items():
+            for relationship_identifier in relationship_identifiers:
+                actual_node = hfids.get_node_definition(kind=relationship_identifier.kind)
+                if relationship_identifier.kind not in targets_by_attribute:
+                    targets_by_attribute[relationship_identifier.kind] = AttributeTarget(
+                        actual_node.get_hash(), fields=set()
+                    )
+                targets_by_attribute[relationship_identifier.kind].fields.add(attribute)
+
+        for target_kind, attribute_target in targets_by_attribute.items():
+            definitions.append(
+                cls.new(
+                    branch=branch,
+                    node_kind=related_kind,
+                    target_kind=target_kind,
+                    fields=sorted(attribute_target.fields),
+                    hfid_hash=attribute_target.hash,
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        return definitions
+
+    @classmethod
+    def new(
+        cls,
+        branch: str,
+        node_kind: str,
+        target_kind: str,
+        hfid_hash: str,
+        fields: list[str],
+        branches_out_of_scope: list[str] | None = None,
+    ) -> Self:
+        event_trigger = EventTrigger()
+        event_trigger.events.add(NodeUpdatedEvent.event_name)
+        event_trigger.match = {"infrahub.node.kind": node_kind}
+        if branches_out_of_scope:
+            event_trigger.match["infrahub.branch.name"] = [f"!{branch}" for branch in branches_out_of_scope]
+        elif not branches_out_of_scope and branch != registry.default_branch:
+            event_trigger.match["infrahub.branch.name"] = branch
+
+        event_trigger.match_related = {
+            "prefect.resource.role": ["infrahub.node.attribute_update", "infrahub.node.relationship_update"],
+            "infrahub.field.name": fields,
+        }
+
+        workflow = ExecuteWorkflow(
+            workflow=HFID_PROCESS,
+            parameters={
+                "branch_name": "{{ event.resource['infrahub.branch.name'] }}",
+                "node_kind": node_kind,
+                "object_id": "{{ event.resource['infrahub.node.id'] }}",
+                "target_kind": target_kind,
+                "context": {
+                    "__prefect_kind": "json",
+                    "value": {
+                        "__prefect_kind": "jinja",
+                        "template": "{{ event.payload['context'] | tojson }}",
+                    },
+                },
+            },
+        )
+
+        trigger_definition_target_kind = target_kind if target_kind == node_kind else None
+
+        return cls(
+            name=f"{target_kind}{NAME_SEPARATOR}by{NAME_SEPARATOR}{node_kind}",
+            hfid_hash=hfid_hash,
+            branch=branch,
+            trigger=event_trigger,
+            actions=[workflow],
+            target_kind=trigger_definition_target_kind,
+        )
+
+
+class HFIDGraphQLResponse(BaseModel):
+    node_id: str
+    hfid_value: list[str] | None = Field(default=None)
+    variables: dict[str, str] = Field(default_factory=dict)
+
+
+class HFIDGraphQL(BaseModel):
+    filter_key: str
+    node_schema: NodeSchema = Field(..., description="The node kind where the computed attribute is defined")
+    variables: list[str] = Field(..., description="The list of variable names used within the computed attribute")
+
+    def render_graphql_query(self, filter_id: str) -> str:
+        query_fields = self.query_fields
+        query_fields["id"] = None
+        query_fields["hfid"] = None
+        query = Query(
+            name="HFIDFilter",
+            query={
+                self.node_schema.kind: {
+                    "@filters": {self.filter_key: filter_id},
+                    "edges": {"node": query_fields},
+                }
+            },
+        )
+
+        return query.render()
+
+    @property
+    def query_fields(self) -> dict[str, Any]:
+        output: dict[str, Any] = {}
+        for variable in self.variables:
+            field_name, remainder = variable.split("__", maxsplit=1)
+            if field_name in self.node_schema.attribute_names:
+                output[field_name] = {remainder: None}
+            elif field_name in self.node_schema.relationship_names:
+                related_attribute, related_value = remainder.split("__", maxsplit=1)
+                relationship = self.node_schema.get_relationship(name=field_name)
+                if relationship.cardinality == RelationshipCardinality.ONE:
+                    if field_name not in output:
+                        output[field_name] = {"node": {}}
+                    output[field_name]["node"][related_attribute] = {related_value: None}
+        return output
+
+    def parse_response(self, response: dict[str, Any]) -> list[HFIDGraphQLResponse]:
+        rendered_response: list[HFIDGraphQLResponse] = []
+        if kind_payload := response.get(self.node_schema.kind):
+            edges = kind_payload.get("edges", [])
+            for node in edges:
+                if node_response := self.to_node_response(node_dict=node):
+                    rendered_response.append(node_response)
+        return rendered_response
+
+    def to_node_response(self, node_dict: dict[str, Any]) -> HFIDGraphQLResponse | None:
+        if node := node_dict.get("node"):
+            node_id = node.get("id")
+        else:
+            return None
+
+        hfid = node.get("hfid")
+        response = HFIDGraphQLResponse(node_id=node_id, hfid_value=hfid)
+        for variable in self.variables:
+            field_name, remainder = variable.split("__", maxsplit=1)
+            # response.variables[variable] = None
+            if field_content := node.get(field_name):
+                if field_name in self.node_schema.attribute_names:
+                    response.variables[variable] = str(field_content.get(remainder, ""))
+                elif field_name in self.node_schema.relationship_names:
+                    relationship = self.node_schema.get_relationship(name=field_name)
+                    if relationship.cardinality == RelationshipCardinality.ONE:
+                        related_attribute, related_value = remainder.split("__", maxsplit=1)
+                        node_content = field_content.get("node") or {}
+                        related_attribute_content = node_content.get(related_attribute) or {}
+                        response.variables[variable] = str(related_attribute_content.get(related_value, ""))
+
+        return response
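HFIDGraphQL relies on a double-underscore naming convention for its variables: attribute__property for local attributes, and relationship__attribute__property for cardinality-one relationships. A standalone sketch of how query_fields interprets those names; the schema fields here are assumptions for illustration:

# Assumed schema for this sketch: attribute "name", cardinality-one relationship "location".
attribute_names = {"name"}
relationship_names = {"location"}
variables = ["name__value", "location__name__value"]

output: dict[str, dict] = {}
for variable in variables:
    field_name, remainder = variable.split("__", maxsplit=1)
    if field_name in attribute_names:
        # "name__value" -> query the "value" property of attribute "name"
        output[field_name] = {remainder: None}
    elif field_name in relationship_names:
        # "location__name__value" -> follow the relationship, then its attribute
        related_attribute, related_value = remainder.split("__", maxsplit=1)
        output.setdefault(field_name, {"node": {}})["node"][related_attribute] = {related_value: None}

# output == {"name": {"value": None}, "location": {"node": {"name": {"value": None}}}}
print(output)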
infrahub/hfid/tasks.py
ADDED

@@ -0,0 +1,185 @@
+from __future__ import annotations
+
+from typing import cast
+
+from prefect import flow
+from prefect.logging import get_run_logger
+
+from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
+from infrahub.core.registry import registry
+from infrahub.events import BranchDeletedEvent
+from infrahub.trigger.models import TriggerSetupReport, TriggerType
+from infrahub.trigger.setup import setup_triggers_specific
+from infrahub.workers.dependencies import get_client, get_component, get_database, get_workflow
+from infrahub.workflows.catalogue import HFID_PROCESS, TRIGGER_UPDATE_HFID
+from infrahub.workflows.utils import add_tags, wait_for_schema_to_converge
+
+from .gather import gather_trigger_hfid
+from .models import HFIDGraphQL, HFIDGraphQLResponse, HFIDTriggerDefinition
+
+UPDATE_HFID = """
+mutation UpdateHFID(
+    $id: String!,
+    $kind: String!,
+    $value: [String!]!
+) {
+    InfrahubUpdateHFID(
+        data: {id: $id, value: $value, kind: $kind}
+    ) {
+        ok
+    }
+}
+"""
+
+
+@flow(
+    name="hfid-update-value",
+    flow_run_name="Update value for hfid on {node_kind}",
+)
+async def hfid_update_value(
+    branch_name: str,
+    obj: HFIDGraphQLResponse,
+    node_kind: str,
+    hfid_definition: list[str],
+) -> None:
+    log = get_run_logger()
+    client = get_client()
+
+    await add_tags(branches=[branch_name], nodes=[obj.node_id], db_change=True)
+
+    rendered_hfid: list[str] = []
+    for hfid_component in hfid_definition:
+        if hfid_component in obj.variables:
+            rendered_hfid.append(obj.variables[hfid_component])
+    # value = await template.render(variables=obj.variables)
+    if rendered_hfid == obj.hfid_value:
+        log.debug(f"Ignoring to update {obj} with existing value on human_friendly_id={obj.hfid_value}")
+        return
+
+    await client.execute_graphql(
+        query=UPDATE_HFID,
+        variables={"id": obj.node_id, "kind": node_kind, "value": rendered_hfid},
+        branch_name=branch_name,
+    )
+    log.info(f"Updating {node_kind}.human_friendly_id='{rendered_hfid}' ({obj.node_id})")
+
+
+@flow(
+    name="hfid-process",
+    flow_run_name="Process human friendly ids for {target_kind}",
+)
+async def process_hfid(
+    branch_name: str,
+    node_kind: str,
+    object_id: str,
+    target_kind: str,
+    context: InfrahubContext,  # noqa: ARG001
+) -> None:
+    log = get_run_logger()
+    client = get_client()
+
+    await add_tags(branches=[branch_name])
+
+    target_schema = branch_name if branch_name in registry.get_altered_schema_branches() else registry.default_branch
+    schema_branch = registry.schema.get_schema_branch(name=target_schema)
+    node_schema = schema_branch.get_node(name=target_kind, duplicate=False)
+
+    if node_kind == target_kind:
+        hfid_definition = schema_branch.hfids.get_node_definition(kind=node_kind)
+    else:
+        hfid_definition = schema_branch.hfids.get_related_definition(related_kind=node_kind, target_kind=target_kind)
+
+    # jinja_template = Jinja2Template(template=display_label_template.template)
+    # variables = jinja_template.get_variables()
+    hfid_graphql = HFIDGraphQL(
+        node_schema=node_schema, variables=hfid_definition.hfid, filter_key=hfid_definition.filter_key
+    )
+
+    query = hfid_graphql.render_graphql_query(filter_id=object_id)
+    response = await client.execute_graphql(query=query, branch_name=branch_name)
+    update_candidates = hfid_graphql.parse_response(response=response)
+
+    if not update_candidates:
+        log.debug("No nodes found that requires updates")
+        return
+
+    batch = await client.create_batch()
+    for node in update_candidates:
+        batch.add(
+            task=hfid_update_value,
+            branch_name=branch_name,
+            obj=node,
+            node_kind=node_schema.kind,
+            hfid_definition=hfid_definition.hfid,
+        )
+
+    _ = [response async for _, response in batch.execute()]
+
+
+@flow(name="hfid-setup", flow_run_name="Setup human friendly ids in task-manager")
+async def hfid_setup(context: InfrahubContext, branch_name: str | None = None, event_name: str | None = None) -> None:
+    database = await get_database()
+    async with database.start_session() as db:
+        log = get_run_logger()
+
+        if branch_name:
+            await add_tags(branches=[branch_name])
+            component = await get_component()
+            await wait_for_schema_to_converge(branch_name=branch_name, component=component, db=db, log=log)
+
+        report: TriggerSetupReport = await setup_triggers_specific(
+            gatherer=gather_trigger_hfid, trigger_type=TriggerType.HUMAN_FRIENDLY_ID
+        )  # type: ignore[misc]
+
+        # Configure all DisplayLabelTriggerDefinitions in Prefect
+        hfid_reports = [cast(HFIDTriggerDefinition, entry) for entry in report.updated + report.created]
+        direct_target_triggers = [hfid_report for hfid_report in hfid_reports if hfid_report.target_kind]
+
+        for display_report in direct_target_triggers:
+            if event_name != BranchDeletedEvent.event_name and display_report.branch == branch_name:
+                await get_workflow().submit_workflow(
+                    workflow=TRIGGER_UPDATE_HFID,
+                    context=context,
+                    parameters={
+                        "branch_name": display_report.branch,
+                        "kind": display_report.target_kind,
+                    },
+                )
+
+        log.info(f"{report.in_use_count} HFID automation configurations completed")
+
+
+@flow(
+    name="trigger-update-hfid",
+    flow_run_name="Trigger updates for display labels for kind",
+)
+async def trigger_update_hfid(
+    branch_name: str,
+    kind: str,
+    context: InfrahubContext,
+) -> None:
+    await add_tags(branches=[branch_name])
+
+    client = get_client()
+
+    # NOTE we only need the id of the nodes, this query will still query for the HFID
+    node_schema = registry.schema.get_node_schema(name=kind, branch=branch_name)
+    nodes = await client.all(
+        kind=kind,
+        branch=branch_name,
+        exclude=node_schema.attribute_names + node_schema.relationship_names,
+        populate_store=False,
+    )
+
+    for node in nodes:
+        await get_workflow().submit_workflow(
+            workflow=HFID_PROCESS,
+            context=context,
+            parameters={
+                "branch_name": branch_name,
+                "node_kind": kind,
+                "target_kind": kind,
+                "object_id": node.id,
+                "context": context,
+            },
+        )
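hfid_update_value rebuilds the human-friendly ID by looking up each component of the schema's hfid definition in the variables parsed from the GraphQL response, and skips the mutation when nothing changed. A small sketch of that assembly step; the definition and values are hypothetical:

# Assumed hfid definition and parsed variables for one device node:
hfid_definition = ["location__name__value", "name__value"]
variables = {"location__name__value": "atl1", "name__value": "edge1"}
current_hfid = ["atl1", "edge1"]

rendered_hfid = [variables[component] for component in hfid_definition if component in variables]
# rendered_hfid == ["atl1", "edge1"] == current_hfid, so the UPDATE_HFID mutation would be skipped.
assert rendered_hfid == current_hfid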
infrahub/hfid/triggers.py
ADDED

@@ -0,0 +1,22 @@
+from infrahub.events.branch_action import BranchDeletedEvent
+from infrahub.events.schema_action import SchemaUpdatedEvent
+from infrahub.trigger.models import BuiltinTriggerDefinition, EventTrigger, ExecuteWorkflow
+from infrahub.workflows.catalogue import HFID_SETUP
+
+TRIGGER_HFID_ALL_SCHEMA = BuiltinTriggerDefinition(
+    name="hfid-setup-all",
+    trigger=EventTrigger(events={SchemaUpdatedEvent.event_name, BranchDeletedEvent.event_name}),
+    actions=[
+        ExecuteWorkflow(
+            workflow=HFID_SETUP,
+            parameters={
+                "branch_name": "{{ event.resource['infrahub.branch.name'] }}",
+                "event_name": "{{ event.event }}",
+                "context": {
+                    "__prefect_kind": "json",
+                    "value": {"__prefect_kind": "jinja", "template": "{{ event.payload['context'] | tojson }}"},
+                },
+            },
+        ),
+    ],
+)
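The nested __prefect_kind structure asks Prefect to first render the inner Jinja template against the triggering event and then parse the result as JSON, so the HFID_SETUP flow receives a structured context rather than a string. A hypothetical rendering for one schema-update event; all values here are illustrative assumptions:

# What the HFID_SETUP parameters might look like after Prefect renders them:
rendered_parameters = {
    "branch_name": "main",                    # from event.resource['infrahub.branch.name']
    "event_name": "infrahub.schema.updated",  # assumed event name, from "{{ event.event }}"
    "context": {"account": {"account_id": "placeholder"}},  # event.payload['context'], parsed from JSON
}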
infrahub/lock.py
CHANGED

@@ -98,10 +98,10 @@ class NATSLock:
         while True:
             if await self.do_acquire(token):
                 self.token = token
-                return
+                return
             await sleep(0.1)  # default Redis GlobalLock value

-    async def do_acquire(self, token: str) -> bool:
+    async def do_acquire(self, token: str) -> bool | None:
         return await self.service.cache.set(key=self.name, value=token, not_exists=True)

     async def release(self) -> None:
@@ -124,14 +124,14 @@ class InfrahubLock:
         local: bool | None = None,
         in_multi: bool = False,
     ) -> None:
-        self.use_local: bool = local
+        self.use_local: bool | None = local
         self.local: LocalLock = None
         self.remote: GlobalLock = None
         self.name: str = name
         self.connection: redis.Redis | None = connection
         self.in_multi: bool = in_multi
         self.lock_type: str = "multi" if self.in_multi else "individual"
-        self.
+        self._acquire_time: int | None = None
         self.event = asyncio.Event()

         if not self.connection or (self.use_local is None and name.startswith("local.")):
@@ -144,6 +144,17 @@ class InfrahubLock:
         else:
             self.remote = NATSLock(service=self.connection, name=f"{LOCK_PREFIX}.{self.name}")

+    @property
+    def acquire_time(self) -> int:
+        if self._acquire_time is not None:
+            return self._acquire_time
+
+        raise ValueError("The lock has not been initialized")
+
+    @acquire_time.setter
+    def acquire_time(self, value: int) -> None:
+        self._acquire_time = value
+
     async def __aenter__(self):
         await self.acquire()

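The new acquire_time property guards against reading the timestamp before it has been recorded. A self-contained sketch of the same pattern, runnable on its own:

class LockTimeDemo:
    # Mirrors the _acquire_time / property pattern added to InfrahubLock.
    def __init__(self) -> None:
        self._acquire_time: int | None = None

    @property
    def acquire_time(self) -> int:
        if self._acquire_time is not None:
            return self._acquire_time
        raise ValueError("The lock has not been initialized")

    @acquire_time.setter
    def acquire_time(self, value: int) -> None:
        self._acquire_time = value

demo = LockTimeDemo()
try:
    demo.acquire_time  # raises ValueError: nothing recorded yet
except ValueError:
    pass
demo.acquire_time = 1_700_000_000  # e.g. a timestamp captured when the lock is acquired
assert demo.acquire_time == 1_700_000_000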
infrahub/middleware.py
CHANGED

@@ -1,7 +1,8 @@
 from typing import Any

+from fastapi.middleware.gzip import GZipMiddleware
 from starlette.middleware.cors import CORSMiddleware
-from starlette.types import ASGIApp
+from starlette.types import ASGIApp, Receive, Scope, Send

 from infrahub import config

@@ -15,3 +16,27 @@ class InfrahubCORSMiddleware(CORSMiddleware):
         kwargs["allow_headers"] = config.SETTINGS.api.cors_allow_headers

         super().__init__(app, *args, **kwargs)
+
+
+class ConditionalGZipMiddleware(GZipMiddleware):
+    def __init__(
+        self,
+        app: ASGIApp,
+        *,
+        minimum_size: int = 500,
+        compresslevel: int = 9,
+        include_paths: tuple[str, ...] = (),
+    ) -> None:
+        super().__init__(app, minimum_size=minimum_size, compresslevel=compresslevel)
+        self.include_paths = include_paths
+
+    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:  # type: ignore[override]
+        if scope["type"] != "http":
+            await self.app(scope, receive, send)
+            return
+
+        path = scope.get("path", "")
+        if any(path.startswith(include) for include in self.include_paths):
+            await super().__call__(scope, receive, send)
+        else:
+            await self.app(scope, receive, send)
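ConditionalGZipMiddleware applies gzip only to requests whose path matches one of the configured prefixes, leaving every other request on the uncompressed fast path. A minimal sketch of wiring it into a FastAPI app; the include path is hypothetical, and this diff does not show how infrahub/server.py actually registers it:

from fastapi import FastAPI

from infrahub.middleware import ConditionalGZipMiddleware

app = FastAPI()
app.add_middleware(
    ConditionalGZipMiddleware,
    minimum_size=500,             # responses below 500 bytes stay uncompressed
    include_paths=("/graphql",),  # hypothetical: compress only GraphQL responses
)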
infrahub/proposed_change/tasks.py
CHANGED

@@ -307,6 +307,7 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
         populate_store=True,
         branch=model.source_branch,
     )
+
     generator_definitions = [
         ProposedChangeGeneratorDefinition(
             definition_id=generator.id,
@@ -319,8 +320,11 @@
             parameters=generator.parameters.value,
             group_id=generator.targets.peer.id,
             convert_query_response=generator.convert_query_response.value,
+            execute_in_proposed_change=generator.execute_in_proposed_change.value,
+            execute_after_merge=generator.execute_after_merge.value,
         )
         for generator in generators
+        if generator.execute_in_proposed_change.value
     ]

     diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id)
@@ -760,6 +764,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
         query=model.generator_definition.query_name,
         targets=model.generator_definition.group_id,
         convert_query_response=model.generator_definition.convert_query_response,
+        execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+        execute_after_merge=model.generator_definition.execute_after_merge,
     )

     commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -786,6 +792,8 @@
         params=model.variables,
         generator_instance=generator_instance.id,
         convert_query_response=generator_definition.convert_query_response,
+        execute_after_merge=generator_definition.execute_after_merge,
+        execute_in_proposed_change=generator_definition.execute_in_proposed_change,
         infrahub_node=InfrahubNode,
     )
     generator._init_client.request_context = context.to_request_context()
@@ -934,7 +942,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
     requested_instances = 0
     impacted_instances = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE)

-    check_generator_run_models = []
+    check_generator_run_models: list[RunGeneratorAsCheckModel] = []
     for relationship in group.members.peers:
         member = relationship.peer
         generator_instance = instance_by_member.get(member.id)
@@ -970,6 +978,7 @@
             context=context,
         )
         for check_generator_run_model in check_generator_run_models
+        if check_generator_run_model.generator_definition.execute_in_proposed_change
    ]

     await run_checks_and_update_validator(
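
The two new flags let a generator opt in or out of the proposed-change pipeline independently of post-merge execution; both list comprehensions above now drop generators that have execute_in_proposed_change unset. A toy sketch of that gating, with hypothetical generator definitions:

# Hypothetical generator definitions carrying the new flags:
definitions = [
    {"name": "cabling", "execute_in_proposed_change": True, "execute_after_merge": True},
    {"name": "reporting", "execute_in_proposed_change": False, "execute_after_merge": True},
]

# Mirrors the new `if generator.execute_in_proposed_change.value` guard:
run_in_proposed_change = [d["name"] for d in definitions if d["execute_in_proposed_change"]]
assert run_in_proposed_change == ["cabling"]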
|