infrahub-server 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +27 -3
- infrahub/auth.py +5 -5
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +160 -157
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +56 -9
- infrahub/computed_attribute/tasks.py +19 -7
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +35 -24
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +9 -5
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +72 -10
- infrahub/core/changelog/models.py +2 -10
- infrahub/core/constants/__init__.py +4 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/calculator.py +2 -2
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/delete_query.py +9 -5
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/diff/query/merge.py +39 -23
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +7 -4
- infrahub/core/manager.py +3 -81
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +13 -10
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +26 -5
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +66 -19
- infrahub/core/models.py +2 -2
- infrahub/core/node/__init__.py +207 -54
- infrahub/core/node/create.py +53 -49
- infrahub/core/node/lock_utils.py +124 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +82 -15
- infrahub/core/query/diff.py +61 -16
- infrahub/core/query/ipam.py +16 -4
- infrahub/core/query/node.py +92 -212
- infrahub/core/query/relationship.py +44 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/relationship/model.py +69 -24
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/check.py +1 -1
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/core/transform.py +1 -1
- infrahub/core/schema/definitions/internal.py +12 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/node_schema.py +1 -0
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +295 -10
- infrahub/core/schema/schema_branch_display.py +135 -0
- infrahub/core/schema/schema_branch_hfid.py +120 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +42 -2
- infrahub/git/integrator.py +22 -14
- infrahub/git/tasks.py +52 -2
- infrahub/graphql/analyzer.py +9 -0
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +16 -6
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +213 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +16 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +44 -13
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +73 -41
- infrahub/graphql/mutations/main.py +61 -178
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +8 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +119 -42
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/message_bus/types.py +1 -0
- infrahub/patch/plan_writer.py +2 -2
- infrahub/permissions/constants.py +2 -0
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +98 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +67 -14
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +9 -1
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workers/dependencies.py +3 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +118 -3
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/models.py +17 -2
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/branch.py +17 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +376 -95
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/branch.py +3 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +20 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +40 -10
- infrahub_sdk/ctl/task.py +110 -0
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/node/relationship.py +1 -3
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +54 -6
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/task/models.py +6 -4
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +233 -176
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub/graphql/schema_sort.py
ADDED
@@ -0,0 +1,170 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from graphql.language.ast import (
+    DocumentNode,
+    EnumTypeDefinitionNode,
+    EnumValueDefinitionNode,
+    FieldDefinitionNode,
+    InputObjectTypeDefinitionNode,
+    InputValueDefinitionNode,
+    InterfaceTypeDefinitionNode,
+    NamedTypeNode,
+    ObjectTypeDefinitionNode,
+)
+
+if TYPE_CHECKING:
+    from graphql import DefinitionNode
+
+
+def _sort_arguments(args: tuple[InputValueDefinitionNode, ...] | None) -> list[InputValueDefinitionNode] | None:
+    """Sort arguments (filters) of a field alphabetically by name.
+
+    Args:
+        args: List of input value definition nodes to sort, or None.
+
+    Returns:
+        Sorted list of input value definition nodes, or None if input was None.
+    """
+    if not args:
+        return None
+    return sorted(args, key=lambda a: a.name.value)
+
+
+def _sort_fields(fields: tuple[FieldDefinitionNode, ...] | None) -> list[FieldDefinitionNode] | None:
+    """Sort fields and their arguments alphabetically.
+
+    Args:
+        fields: List of field definition nodes to sort, or None.
+
+    Returns:
+        Sorted list of field definition nodes with sorted arguments, or None if input was None.
+    """
+    if not fields:
+        return None
+    sorted_fields = []
+    for field in sorted(fields, key=lambda fld: fld.name.value):
+        sorted_args = _sort_arguments(field.arguments)
+        sorted_fields.append(
+            FieldDefinitionNode(
+                name=field.name,
+                type=field.type,
+                arguments=sorted_args,
+                directives=field.directives,
+                description=field.description,
+                loc=field.loc,
+            )
+        )
+    return sorted_fields
+
+
+def _sort_enum_values(values: tuple[EnumValueDefinitionNode, ...] | None) -> list[EnumValueDefinitionNode] | None:
+    """Sort enum values alphabetically by name.
+
+    Args:
+        values: List of enum value definition nodes to sort, or None.
+
+    Returns:
+        Sorted list of enum value definition nodes, or None if input was None.
+    """
+    if not values:
+        return None
+    return sorted(values, key=lambda v: v.name.value)
+
+
+def _sort_input_fields(fields: tuple[InputValueDefinitionNode, ...] | None) -> list[InputValueDefinitionNode] | None:
+    """Sort input object fields alphabetically by name.
+
+    Args:
+        fields: List of input value definition nodes to sort, or None.
+
+    Returns:
+        Sorted list of input value definition nodes, or None if input was None.
+    """
+    if not fields:
+        return None
+    return sorted(fields, key=lambda f: f.name.value)
+
+
+def _sort_interfaces(interfaces: tuple[NamedTypeNode, ...] | None) -> list[NamedTypeNode] | None:
+    """Sort interface implementations alphabetically by name.
+
+    Args:
+        interfaces: Tuple of named type nodes representing interfaces, or None.
+
+    Returns:
+        Sorted list of named type nodes, or None if input was None.
+    """
+    if not interfaces:
+        return None
+    return sorted(interfaces, key=lambda i: i.name.value)
+
+
+def sort_schema_ast(document: DocumentNode) -> DocumentNode:
+    """Return a new DocumentNode with all definitions, fields, and arguments sorted alphabetically.
+
+    This function recursively sorts all GraphQL schema elements including:
+    - Type definitions (objects, interfaces, enums, input objects)
+    - Field definitions and their arguments
+    - Enum values
+    - Input object fields
+
+    Args:
+        document: The GraphQL document node containing schema definitions.
+
+    Returns:
+        A new DocumentNode with all elements sorted alphabetically by name.
+    """
+
+    sorted_definitions: list[
+        ObjectTypeDefinitionNode
+        | InterfaceTypeDefinitionNode
+        | EnumTypeDefinitionNode
+        | InputObjectTypeDefinitionNode
+        | DefinitionNode
+    ] = []
+
+    for definition in sorted(
+        document.definitions, key=lambda d: getattr(d.name, "value", "") if hasattr(d, "name") and d.name else ""
+    ):
+        if isinstance(definition, (ObjectTypeDefinitionNode, InterfaceTypeDefinitionNode)):
+            sorted_fields = _sort_fields(definition.fields)
+            sorted_interfaces = _sort_interfaces(definition.interfaces)
+            sorted_definitions.append(
+                definition.__class__(
+                    name=definition.name,
+                    interfaces=sorted_interfaces,
+                    directives=definition.directives,
+                    fields=sorted_fields,
+                    description=definition.description,
+                    loc=definition.loc,
+                )
+            )
+
+        elif isinstance(definition, EnumTypeDefinitionNode):
+            sorted_values = _sort_enum_values(definition.values)
+            sorted_definitions.append(
+                EnumTypeDefinitionNode(
+                    name=definition.name,
+                    directives=definition.directives,
+                    values=sorted_values,
+                    description=definition.description,
+                    loc=definition.loc,
+                )
+            )
+        elif isinstance(definition, InputObjectTypeDefinitionNode):
+            sorted_inputs = _sort_input_fields(definition.fields)
+            sorted_definitions.append(
+                InputObjectTypeDefinitionNode(
+                    name=definition.name,
+                    directives=definition.directives,
+                    fields=sorted_inputs,
+                    description=definition.description,
+                    loc=definition.loc,
+                )
+            )
+        else:
+            sorted_definitions.append(definition)
+
+    return DocumentNode(definitions=sorted_definitions)
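For orientation, a minimal sketch of how this new helper can be exercised with graphql-core; the SDL below is an invented example, not schema shipped in the package.

# Hedged usage sketch for sort_schema_ast (module path taken from the file list above).
from graphql import parse, print_ast

from infrahub.graphql.schema_sort import sort_schema_ast

SDL = """
type Zebra { name: String legs: Int }
enum Color { GREEN BLUE RED }
type Antelope { speed(unit: String, round: Boolean): Float name: String }
"""

document = parse(SDL)  # DocumentNode with definitions in source order
stable_document = sort_schema_ast(document)  # definitions, fields, arguments, and enum values sorted by name
print(print_ast(stable_document))  # Antelope before Color before Zebra; "name" before "speed"; "round" before "unit"

Sorting the AST this way yields a stable, diff-friendly rendering of the schema regardless of the order in which definitions were generated.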
infrahub/graphql/types/branch.py
CHANGED
@@ -2,11 +2,12 @@ from __future__ import annotations
 
 from typing import TYPE_CHECKING, Any
 
-from graphene import Boolean, Field, String
+from graphene import Boolean, Field, Int, String
 
 from infrahub.core.branch import Branch
 from infrahub.core.constants import GLOBAL_BRANCH_NAME
 
+from .enums import InfrahubBranchStatus
 from .standard_node import InfrahubObjectType
 
 if TYPE_CHECKING:
@@ -19,6 +20,8 @@ class BranchType(InfrahubObjectType):
     description = String(required=False)
     origin_branch = String(required=False)
     branched_from = String(required=False)
+    status = InfrahubBranchStatus(required=True)
+    graph_version = Int(required=False)
     created_at = String(required=False)
     sync_with_git = Boolean(required=False)
     is_default = Boolean(required=False)
infrahub/graphql/types/enums.py
CHANGED
@@ -1,6 +1,7 @@
 from graphene import Enum
 
 from infrahub.core import constants
+from infrahub.core.branch.enums import BranchStatus
 from infrahub.permissions import constants as permission_constants
 
 CheckType = Enum.from_enum(constants.CheckType)
@@ -10,3 +11,5 @@ DiffAction = Enum.from_enum(constants.DiffAction)
 Severity = Enum.from_enum(constants.Severity)
 
 BranchRelativePermissionDecision = Enum.from_enum(permission_constants.BranchRelativePermissionDecision)
+
+InfrahubBranchStatus = Enum.from_enum(BranchStatus)
infrahub/groups/tasks.py
CHANGED
@@ -20,7 +20,7 @@ async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate) -> N
     if len(model.subscribers) == 1:
         related_nodes.append(model.subscribers[0])
 
-    await add_tags(branches=[model.branch], nodes=related_nodes)
+    await add_tags(branches=[model.branch], nodes=related_nodes, namespace=False)
 
     params_hash = dict_hash(model.params)
     group_name = f"{model.query_name}__{params_hash}"
infrahub/hfid/__init__.py
ADDED
File without changes
infrahub/hfid/gather.py
ADDED
@@ -0,0 +1,48 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+
+from prefect import task
+from prefect.cache_policies import NONE
+from prefect.logging import get_run_logger
+
+from infrahub.core.registry import registry
+from infrahub.database import InfrahubDatabase  # noqa: TC001 needed for prefect flow
+
+from .models import HFIDTriggerDefinition
+
+
+@dataclass
+class BranchScope:
+    name: str
+    out_of_scope: list[str] = field(default_factory=list)
+
+
+@task(
+    name="gather-trigger-hfid",
+    cache_policy=NONE,
+)
+async def gather_trigger_hfid(
+    db: InfrahubDatabase | None = None,  # noqa: ARG001 Needed to have a common function signature for gathering functions
+) -> list[HFIDTriggerDefinition]:
+    log = get_run_logger()
+
+    # Build a list of all branches to process based on which branch is different from main
+    branches_with_diff_from_main = registry.get_altered_schema_branches()
+    branches_to_process: list[BranchScope] = [BranchScope(name=branch) for branch in branches_with_diff_from_main]
+    branches_to_process.append(BranchScope(name=registry.default_branch, out_of_scope=branches_with_diff_from_main))
+
+    triggers: list[HFIDTriggerDefinition] = []
+
+    for branch in branches_to_process:
+        schema_branch = registry.schema.get_schema_branch(name=branch.name)
+        branch_triggers = HFIDTriggerDefinition.from_schema_hfids(
+            branch=branch.name,
+            hfids=schema_branch.hfids,
+            branches_out_of_scope=branch.out_of_scope,
+        )
+        log.info(f"Generating {len(branch_triggers)} HFID trigger for {branch.name} (except {branch.out_of_scope})")
+
+        triggers.extend(branch_triggers)
+
+    return triggers
infrahub/hfid/models.py
ADDED
@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Self
+
+from infrahub_sdk.graphql import Query
+from pydantic import BaseModel, Field
+
+from infrahub.core.constants import RelationshipCardinality
+from infrahub.core.registry import registry
+from infrahub.core.schema import NodeSchema  # noqa: TC001
+from infrahub.events import NodeUpdatedEvent
+from infrahub.trigger.constants import NAME_SEPARATOR
+from infrahub.trigger.models import (
+    EventTrigger,
+    ExecuteWorkflow,
+    TriggerBranchDefinition,
+    TriggerType,
+)
+from infrahub.workflows.catalogue import HFID_PROCESS
+
+if TYPE_CHECKING:
+    from infrahub.core.schema.schema_branch_hfid import HFIDs, RelationshipTriggers
+
+
+@dataclass
+class AttributeTarget:
+    hash: str
+    fields: set[str]
+
+
+class HFIDTriggerDefinition(TriggerBranchDefinition):
+    type: TriggerType = TriggerType.HUMAN_FRIENDLY_ID
+    hfid_hash: str
+    target_kind: str | None = Field(default=None)
+
+    def get_description(self) -> str:
+        return f"{super().get_description()} | hash:{self.hfid_hash}"
+
+    @classmethod
+    def from_schema_hfids(
+        cls,
+        branch: str,
+        hfids: HFIDs,
+        branches_out_of_scope: list[str] | None = None,
+    ) -> list[HFIDTriggerDefinition]:
+        """
+        This function is used to create a trigger definition for a display labels of type Jinja2.
+        """
+
+        definitions: list[HFIDTriggerDefinition] = []
+
+        for node_kind, hfid_definition in hfids.get_template_nodes().items():
+            definitions.append(
+                cls.new(
+                    branch=branch,
+                    node_kind=node_kind,
+                    target_kind=node_kind,
+                    fields=[
+                        "_trigger_placeholder"
+                    ],  # Triggers for the nodes themselves are only used to determine if all nodes should be regenerated
+                    hfid_hash=hfid_definition.get_hash(),
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        for related_kind, relationship_trigger in hfids.get_related_trigger_nodes().items():
+            definitions.extend(
+                cls.from_related_node(
+                    branch=branch,
+                    related_kind=related_kind,
+                    relationship_trigger=relationship_trigger,
+                    hfids=hfids,
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        return definitions
+
+    @classmethod
+    def from_related_node(
+        cls,
+        branch: str,
+        related_kind: str,
+        relationship_trigger: RelationshipTriggers,
+        hfids: HFIDs,
+        branches_out_of_scope: list[str] | None = None,
+    ) -> list[HFIDTriggerDefinition]:
+        targets_by_attribute: dict[str, AttributeTarget] = {}
+        definitions: list[HFIDTriggerDefinition] = []
+        for attribute, relationship_identifiers in relationship_trigger.attributes.items():
+            for relationship_identifier in relationship_identifiers:
+                actual_node = hfids.get_node_definition(kind=relationship_identifier.kind)
+                if relationship_identifier.kind not in targets_by_attribute:
+                    targets_by_attribute[relationship_identifier.kind] = AttributeTarget(
+                        actual_node.get_hash(), fields=set()
+                    )
+                targets_by_attribute[relationship_identifier.kind].fields.add(attribute)
+
+        for target_kind, attribute_target in targets_by_attribute.items():
+            definitions.append(
+                cls.new(
+                    branch=branch,
+                    node_kind=related_kind,
+                    target_kind=target_kind,
+                    fields=sorted(attribute_target.fields),
+                    hfid_hash=attribute_target.hash,
+                    branches_out_of_scope=branches_out_of_scope,
+                )
+            )
+
+        return definitions
+
+    @classmethod
+    def new(
+        cls,
+        branch: str,
+        node_kind: str,
+        target_kind: str,
+        hfid_hash: str,
+        fields: list[str],
+        branches_out_of_scope: list[str] | None = None,
+    ) -> Self:
+        event_trigger = EventTrigger()
+        event_trigger.events.add(NodeUpdatedEvent.event_name)
+        event_trigger.match = {"infrahub.node.kind": node_kind}
+        if branches_out_of_scope:
+            event_trigger.match["infrahub.branch.name"] = [f"!{branch}" for branch in branches_out_of_scope]
+        elif not branches_out_of_scope and branch != registry.default_branch:
+            event_trigger.match["infrahub.branch.name"] = branch
+
+        event_trigger.match_related = {
+            "prefect.resource.role": ["infrahub.node.attribute_update", "infrahub.node.relationship_update"],
+            "infrahub.field.name": fields,
+        }
+
+        workflow = ExecuteWorkflow(
+            workflow=HFID_PROCESS,
+            parameters={
+                "branch_name": "{{ event.resource['infrahub.branch.name'] }}",
+                "node_kind": node_kind,
+                "object_id": "{{ event.resource['infrahub.node.id'] }}",
+                "target_kind": target_kind,
+                "context": {
+                    "__prefect_kind": "json",
+                    "value": {
+                        "__prefect_kind": "jinja",
+                        "template": "{{ event.payload['context'] | tojson }}",
+                    },
+                },
+            },
+        )
+
+        trigger_definition_target_kind = target_kind if target_kind == node_kind else None
+
+        return cls(
+            name=f"{target_kind}{NAME_SEPARATOR}by{NAME_SEPARATOR}{node_kind}",
+            hfid_hash=hfid_hash,
+            branch=branch,
+            trigger=event_trigger,
+            actions=[workflow],
+            target_kind=trigger_definition_target_kind,
+        )
+
+
+class HFIDGraphQLResponse(BaseModel):
+    node_id: str
+    hfid_value: list[str] | None = Field(default=None)
+    variables: dict[str, str] = Field(default_factory=dict)
+
+
+class HFIDGraphQL(BaseModel):
+    filter_key: str
+    node_schema: NodeSchema = Field(..., description="The node kind where the computed attribute is defined")
+    variables: list[str] = Field(..., description="The list of variable names used within the computed attribute")
+
+    def render_graphql_query(self, filter_id: str) -> str:
+        query_fields = self.query_fields
+        query_fields["id"] = None
+        query_fields["hfid"] = None
+        query = Query(
+            name="HFIDFilter",
+            query={
+                self.node_schema.kind: {
+                    "@filters": {self.filter_key: filter_id},
+                    "edges": {"node": query_fields},
+                }
+            },
+        )
+
+        return query.render()
+
+    @property
+    def query_fields(self) -> dict[str, Any]:
+        output: dict[str, Any] = {}
+        for variable in self.variables:
+            field_name, remainder = variable.split("__", maxsplit=1)
+            if field_name in self.node_schema.attribute_names:
+                output[field_name] = {remainder: None}
+            elif field_name in self.node_schema.relationship_names:
+                related_attribute, related_value = remainder.split("__", maxsplit=1)
+                relationship = self.node_schema.get_relationship(name=field_name)
+                if relationship.cardinality == RelationshipCardinality.ONE:
+                    if field_name not in output:
+                        output[field_name] = {"node": {}}
+                    output[field_name]["node"][related_attribute] = {related_value: None}
+        return output
+
+    def parse_response(self, response: dict[str, Any]) -> list[HFIDGraphQLResponse]:
+        rendered_response: list[HFIDGraphQLResponse] = []
+        if kind_payload := response.get(self.node_schema.kind):
+            edges = kind_payload.get("edges", [])
+            for node in edges:
+                if node_response := self.to_node_response(node_dict=node):
+                    rendered_response.append(node_response)
+        return rendered_response
+
+    def to_node_response(self, node_dict: dict[str, Any]) -> HFIDGraphQLResponse | None:
+        if node := node_dict.get("node"):
+            node_id = node.get("id")
+        else:
+            return None
+
+        hfid = node.get("hfid")
+        response = HFIDGraphQLResponse(node_id=node_id, hfid_value=hfid)
+        for variable in self.variables:
+            field_name, remainder = variable.split("__", maxsplit=1)
+            # response.variables[variable] = None
+            if field_content := node.get(field_name):
+                if field_name in self.node_schema.attribute_names:
+                    response.variables[variable] = str(field_content.get(remainder, ""))
+                elif field_name in self.node_schema.relationship_names:
+                    relationship = self.node_schema.get_relationship(name=field_name)
+                    if relationship.cardinality == RelationshipCardinality.ONE:
+                        related_attribute, related_value = remainder.split("__", maxsplit=1)
+                        node_content = field_content.get("node") or {}
+                        related_attribute_content = node_content.get(related_attribute) or {}
+                        response.variables[variable] = str(related_attribute_content.get(related_value, ""))
+
+        return response
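To make the data flow through HFIDGraphQL easier to follow, here is a hedged sketch of the response shape that parse_response walks; the kind "TestDevice", the variable "name__value", and the id value are illustrative placeholders, not values taken from the package.

# Illustrative payload only: "TestDevice", "name__value", and the id below are made up.
example_response = {
    "TestDevice": {
        "edges": [
            {
                "node": {
                    "id": "1234-abcd",
                    "hfid": ["spine1"],
                    "name": {"value": "spine1"},  # attribute variables map to {field: {property: value}}
                },
            },
        ],
    },
}

# With variables=["name__value"], parse_response would yield one
# HFIDGraphQLResponse(node_id="1234-abcd", hfid_value=["spine1"],
# variables={"name__value": "spine1"}); the hfid_update_value flow in tasks.py
# (shown next) compares the rendered HFID against hfid_value before issuing the
# InfrahubUpdateHFID mutation.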
infrahub/hfid/tasks.py
ADDED
@@ -0,0 +1,191 @@
+from __future__ import annotations
+
+from typing import cast
+
+from infrahub_sdk.exceptions import URLNotFoundError
+from prefect import flow
+from prefect.logging import get_run_logger
+
+from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
+from infrahub.core.registry import registry
+from infrahub.events import BranchDeletedEvent
+from infrahub.trigger.models import TriggerSetupReport, TriggerType
+from infrahub.trigger.setup import setup_triggers_specific
+from infrahub.workers.dependencies import get_client, get_component, get_database, get_workflow
+from infrahub.workflows.catalogue import HFID_PROCESS, TRIGGER_UPDATE_HFID
+from infrahub.workflows.utils import add_tags, wait_for_schema_to_converge
+
+from .gather import gather_trigger_hfid
+from .models import HFIDGraphQL, HFIDGraphQLResponse, HFIDTriggerDefinition
+
+UPDATE_HFID = """
+mutation UpdateHFID(
+    $id: String!,
+    $kind: String!,
+    $value: [String!]!
+) {
+    InfrahubUpdateHFID(
+        data: {id: $id, value: $value, kind: $kind}
+    ) {
+        ok
+    }
+}
+"""
+
+
+@flow(
+    name="hfid-update-value",
+    flow_run_name="Update value for hfid on {node_kind}",
+)
+async def hfid_update_value(
+    branch_name: str,
+    obj: HFIDGraphQLResponse,
+    node_kind: str,
+    hfid_definition: list[str],
+) -> None:
+    log = get_run_logger()
+    client = get_client()
+
+    await add_tags(branches=[branch_name], nodes=[obj.node_id], db_change=True)
+
+    rendered_hfid: list[str] = []
+    for hfid_component in hfid_definition:
+        if hfid_component in obj.variables:
+            rendered_hfid.append(obj.variables[hfid_component])
+    # value = await template.render(variables=obj.variables)
+    if rendered_hfid == obj.hfid_value:
+        log.debug(f"Ignoring to update {obj} with existing value on human_friendly_id={obj.hfid_value}")
+        return
+
+    try:
+        await client.execute_graphql(
+            query=UPDATE_HFID,
+            variables={"id": obj.node_id, "kind": node_kind, "value": rendered_hfid},
+            branch_name=branch_name,
+        )
+        log.info(f"Updating {node_kind}.human_friendly_id='{rendered_hfid}' ({obj.node_id})")
+    except URLNotFoundError:
+        log.warning(
+            f"Updating {node_kind}.human_friendly_id='{rendered_hfid}' ({obj.node_id}) failed for branch {branch_name} (branch not found)"
+        )
+
+
+@flow(
+    name="hfid-process",
+    flow_run_name="Process human friendly ids for {target_kind}",
+)
+async def process_hfid(
+    branch_name: str,
+    node_kind: str,
+    object_id: str,
+    target_kind: str,
+    context: InfrahubContext,  # noqa: ARG001
+) -> None:
+    log = get_run_logger()
+    client = get_client()
+
+    await add_tags(branches=[branch_name])
+
+    target_schema = branch_name if branch_name in registry.get_altered_schema_branches() else registry.default_branch
+    schema_branch = registry.schema.get_schema_branch(name=target_schema)
+    node_schema = schema_branch.get_node(name=target_kind, duplicate=False)
+
+    if node_kind == target_kind:
+        hfid_definition = schema_branch.hfids.get_node_definition(kind=node_kind)
+    else:
+        hfid_definition = schema_branch.hfids.get_related_definition(related_kind=node_kind, target_kind=target_kind)
+
+    # jinja_template = Jinja2Template(template=display_label_template.template)
+    # variables = jinja_template.get_variables()
+    hfid_graphql = HFIDGraphQL(
+        node_schema=node_schema, variables=hfid_definition.hfid, filter_key=hfid_definition.filter_key
+    )
+
+    query = hfid_graphql.render_graphql_query(filter_id=object_id)
+    response = await client.execute_graphql(query=query, branch_name=branch_name)
+    update_candidates = hfid_graphql.parse_response(response=response)
+
+    if not update_candidates:
+        log.debug("No nodes found that requires updates")
+        return
+
+    batch = await client.create_batch()
+    for node in update_candidates:
+        batch.add(
+            task=hfid_update_value,
+            branch_name=branch_name,
+            obj=node,
+            node_kind=node_schema.kind,
+            hfid_definition=hfid_definition.hfid,
+        )
+
+    _ = [response async for _, response in batch.execute()]
+
+
+@flow(name="hfid-setup", flow_run_name="Setup human friendly ids in task-manager")
+async def hfid_setup(context: InfrahubContext, branch_name: str | None = None, event_name: str | None = None) -> None:
+    database = await get_database()
+    async with database.start_session() as db:
+        log = get_run_logger()
+
+        if branch_name:
+            await add_tags(branches=[branch_name])
+            component = await get_component()
+            await wait_for_schema_to_converge(branch_name=branch_name, component=component, db=db, log=log)
+
+        report: TriggerSetupReport = await setup_triggers_specific(
+            gatherer=gather_trigger_hfid, trigger_type=TriggerType.HUMAN_FRIENDLY_ID
+        )  # type: ignore[misc]
+
+        # Configure all DisplayLabelTriggerDefinitions in Prefect
+        hfid_reports = [cast(HFIDTriggerDefinition, entry) for entry in report.updated + report.created]
+        direct_target_triggers = [hfid_report for hfid_report in hfid_reports if hfid_report.target_kind]
+
+        for display_report in direct_target_triggers:
+            if event_name != BranchDeletedEvent.event_name and display_report.branch == branch_name:
+                await get_workflow().submit_workflow(
+                    workflow=TRIGGER_UPDATE_HFID,
+                    context=context,
+                    parameters={
+                        "branch_name": display_report.branch,
+                        "kind": display_report.target_kind,
+                    },
+                )
+
+        log.info(f"{report.in_use_count} HFID automation configurations completed")
+
+
+@flow(
+    name="trigger-update-hfid",
+    flow_run_name="Trigger updates for HFID for {kind}",
+)
+async def trigger_update_hfid(
+    branch_name: str,
+    kind: str,
+    context: InfrahubContext,
+) -> None:
+    await add_tags(branches=[branch_name])
+
+    client = get_client()
+
+    # NOTE we only need the id of the nodes, this query will still query for the HFID
+    node_schema = registry.schema.get_node_schema(name=kind, branch=branch_name)
+    nodes = await client.all(
+        kind=kind,
+        branch=branch_name,
+        exclude=node_schema.attribute_names + node_schema.relationship_names,
+        populate_store=False,
+    )
+
+    for node in nodes:
+        await get_workflow().submit_workflow(
+            workflow=HFID_PROCESS,
+            context=context,
+            parameters={
+                "branch_name": branch_name,
+                "node_kind": kind,
+                "target_kind": kind,
+                "object_id": node.id,
+                "context": context,
+            },
+        )