infrahub-server 1.5.2__py3-none-any.whl → 1.5.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -119,6 +119,12 @@ class ComputedAttrJinja2TriggerDefinition(TriggerBranchDefinition):
     type: TriggerType = TriggerType.COMPUTED_ATTR_JINJA2
     computed_attribute: ComputedAttributeTarget
     template_hash: str
+    trigger_kind: str
+
+    @property
+    def targets_self(self) -> bool:
+        """Determine if the specific trigger definition targets the actual node kind of the computed attribute."""
+        return self.trigger_kind == self.computed_attribute.kind

     def get_description(self) -> str:
         return f"{super().get_description()} | hash:{self.template_hash}"
@@ -190,6 +196,7 @@ class ComputedAttrJinja2TriggerDefinition(TriggerBranchDefinition):
         definition = cls(
             name=f"{computed_attribute.key_name}{NAME_SEPARATOR}kind{NAME_SEPARATOR}{trigger_node.kind}",
             template_hash=template_hash,
+            trigger_kind=trigger_node.kind,
             branch=branch,
             computed_attribute=computed_attribute,
             trigger=event_trigger,
@@ -29,6 +29,7 @@ from .gather import gather_trigger_computed_attribute_jinja2, gather_trigger_com
 from .models import (
     ComputedAttrJinja2GraphQL,
     ComputedAttrJinja2GraphQLResponse,
+    ComputedAttrJinja2TriggerDefinition,
     PythonTransformTarget,
 )

@@ -312,21 +313,46 @@ async def computed_attribute_setup_jinja2(
     ) # type: ignore[misc]
     # Configure all ComputedAttrJinja2Trigger in Prefect

+    all_triggers = report.triggers_with_type(trigger_type=ComputedAttrJinja2TriggerDefinition)
+
     # Since we can have multiple trigger per NodeKind
-    # we need to extract the list of unique node that should be processed
-    unique_nodes: set[tuple[str, str, str]] = {
-        (trigger.branch, trigger.computed_attribute.kind, trigger.computed_attribute.attribute.name)  # type: ignore[attr-defined]
-        for trigger in report.updated + report.created
-    }
-    for branch, kind, attribute_name in unique_nodes:
-        if event_name != BranchDeletedEvent.event_name and branch == branch_name:
+    # we need to extract the list of unique nodes that should be processed; this is done by keeping only the triggers where targets_self is true
+    modified_triggers = [
+        trigger
+        for trigger in report.modified_triggers_with_type(trigger_type=ComputedAttrJinja2TriggerDefinition)
+        if trigger.targets_self
+    ]
+
+    for modified_trigger in modified_triggers:
+        if event_name != BranchDeletedEvent.event_name and modified_trigger.branch == branch_name:
+            if branch_name != registry.default_branch:
+                default_branch_triggers = [
+                    trigger
+                    for trigger in all_triggers
+                    if trigger.branch == registry.default_branch
+                    and trigger.targets_self
+                    and trigger.computed_attribute.kind == modified_trigger.computed_attribute.kind
+                    and trigger.computed_attribute.attribute.name
+                    == modified_trigger.computed_attribute.attribute.name
+                ]
+                if (
+                    default_branch_triggers
+                    and len(default_branch_triggers) == 1
+                    and default_branch_triggers[0].template_hash == modified_trigger.template_hash
+                ):
+                    log.debug(
+                        f"Skipping computed attribute updates for {modified_trigger.computed_attribute.kind}."
+                        f"{modified_trigger.computed_attribute.attribute.name} [{branch_name}], schema is identical to default branch"
+                    )
+                    continue
+
             await get_workflow().submit_workflow(
                 workflow=TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
                 context=context,
                 parameters={
-                    "branch_name": branch,
-                    "computed_attribute_name": attribute_name,
-                    "computed_attribute_kind": kind,
+                    "branch_name": modified_trigger.branch,
+                    "computed_attribute_name": modified_trigger.computed_attribute.attribute.name,
+                    "computed_attribute_kind": modified_trigger.computed_attribute.kind,
                 },
             )

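The skip logic above avoids re-submitting computed-attribute update workflows on a branch whose template is byte-identical to the default branch's. A minimal standalone sketch of that decision, using a simplified stand-in dataclass rather than the real trigger models (hypothetical values, not Infrahub's API):

```python
from dataclasses import dataclass


@dataclass
class Trigger:
    branch: str
    kind: str
    attribute: str
    template_hash: str


def should_skip(modified: Trigger, all_triggers: list[Trigger], default_branch: str = "main") -> bool:
    # Mirrors the skip condition: on a non-default branch, skip when exactly one
    # matching default-branch trigger exists and its template hash is identical.
    if modified.branch == default_branch:
        return False
    matches = [
        t
        for t in all_triggers
        if t.branch == default_branch and t.kind == modified.kind and t.attribute == modified.attribute
    ]
    return len(matches) == 1 and matches[0].template_hash == modified.template_hash


main_trigger = Trigger("main", "InfraDevice", "fqdn", "abc123")
branch_trigger = Trigger("feature", "InfraDevice", "fqdn", "abc123")
assert should_skip(branch_trigger, [main_trigger])  # identical template hash: no duplicate workflow
```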
infrahub/config.py CHANGED
@@ -402,6 +402,11 @@ class WorkflowSettings(BaseSettings):
     worker_polling_interval: int = Field(
         default=2, ge=1, le=30, description="Specify how often the worker should poll the server for tasks (sec)"
     )
+    flow_run_count_cache_threshold: int = Field(
+        default=100_000,
+        ge=0,
+        description="Threshold for caching flow run counts (0 to always cache, higher values to disable)",
+    )

     @property
     def api_endpoint(self) -> str:
@@ -336,7 +336,7 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin):

         delete_at = Timestamp(at)

-        query = await AttributeGetQuery.init(db=db, attr=self)
+        query = await AttributeGetQuery.init(db=db, attr=self, at=delete_at)
         await query.execute(db=db)
         results = query.get_results()

@@ -339,7 +339,7 @@ class Branch(StandardNode):
         at = Timestamp(at)
         at_str = at.to_string()
         if branch_agnostic:
-            filter_str = f"{variable_name}.from <= ${pp}time1 AND ({variable_name}.to IS NULL or {variable_name}.to >= ${pp}time1)"
+            filter_str = f"{variable_name}.from < ${pp}time1 AND ({variable_name}.to IS NULL or {variable_name}.to >= ${pp}time1)"
             params[f"{pp}time1"] = at_str
             return filter_str, params

@@ -352,10 +352,13 @@ class Branch(StandardNode):
         filters = []
         for idx in range(len(branches_times)):
             filters.append(
-                f"({variable_name}.branch IN ${pp}branch{idx} AND {variable_name}.from <= ${pp}time{idx} AND {variable_name}.to IS NULL)"
+                f"({variable_name}.branch IN ${pp}branch{idx} "
+                f"AND {variable_name}.from < ${pp}time{idx} AND {variable_name}.to IS NULL)"
             )
             filters.append(
-                f"({variable_name}.branch IN ${pp}branch{idx} AND {variable_name}.from <= ${pp}time{idx} AND {variable_name}.to >= ${pp}time{idx})"
+                f"({variable_name}.branch IN ${pp}branch{idx} "
+                f"AND {variable_name}.from < ${pp}time{idx} "
+                f"AND {variable_name}.to >= ${pp}time{idx})"
             )

         filter_str = "(" + "\n OR ".join(filters) + ")"
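Both hunks above tighten the lower time bound in the generated Cypher filter from `<=` to a strict `<` on the edge's `from` property. A minimal plain-Python sketch of the boundary effect (hypothetical timestamps; ISO-8601 strings compare chronologically):

```python
# An edge whose "from" equals the query timestamp matched under the old "<=" filter
# but is excluded by the new strict "<" comparison.
edge_from = "2024-01-01T00:00:00Z"
query_at = "2024-01-01T00:00:00Z"

old_match = edge_from <= query_at  # True: edge counted as active at its exact start time
new_match = edge_from < query_at   # False: edge only active strictly after "from"

print(old_match, new_match)  # True False
```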
@@ -1 +1 @@
-GRAPH_VERSION = 45
+GRAPH_VERSION = 46
@@ -47,6 +47,7 @@ from .m042_profile_attrs_in_db import Migration042
 from .m043_create_hfid_display_label_in_db import Migration043
 from .m044_backfill_hfid_display_label_in_db import Migration044
 from .m045_backfill_hfid_display_label_in_db_profile_template import Migration045
+from .m046_fill_agnostic_hfid_display_labels import Migration046

 if TYPE_CHECKING:
     from ..shared import MigrationTypes
@@ -98,6 +99,7 @@ MIGRATIONS: list[type[MigrationTypes]] = [
     Migration043,
     Migration044,
     Migration045,
+    Migration046,
 ]

@@ -6,7 +6,7 @@ from rich.progress import Progress

 from infrahub.core import registry
 from infrahub.core.branch import Branch
-from infrahub.core.constants import SchemaPathType
+from infrahub.core.constants import BranchSupportType, SchemaPathType
 from infrahub.core.initialization import get_root_node
 from infrahub.core.migrations.schema.node_attribute_add import NodeAttributeAddMigration
 from infrahub.core.migrations.shared import MigrationRequiringRebase, MigrationResult, get_migration_console
@@ -78,6 +78,8 @@ class Migration043(MigrationRequiringRebase):

         for node_schema_kind in main_schema_branch.node_names:
             schema = main_schema_branch.get(name=node_schema_kind, duplicate=False)
+            if schema.branch is not BranchSupportType.AWARE:
+                continue
             migrations.extend(
                 [
                     NodeAttributeAddMigration(
@@ -123,6 +125,8 @@ class Migration043(MigrationRequiringRebase):

         for node_kind, node_ids in node_ids_by_kind.items():
             schema = schema_branch.get(name=node_kind, duplicate=False)
+            if schema.branch not in (BranchSupportType.AWARE, BranchSupportType.LOCAL):
+                continue
             migrations.extend(
                 [
                     NodeAttributeAddMigration(
@@ -40,18 +40,23 @@ class DefaultBranchNodeCount(Query):
     name = "get_branch_node_count"
     type = QueryType.READ

-    def __init__(self, kinds_to_skip: list[str], **kwargs: Any) -> None:
+    def __init__(
+        self, kinds_to_skip: list[str] | None = None, kinds_to_include: list[str] | None = None, **kwargs: Any
+    ) -> None:
         super().__init__(**kwargs)
-        self.kinds_to_skip = kinds_to_skip
+        self.kinds_to_skip = kinds_to_skip or []
+        self.kinds_to_include = kinds_to_include

     async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
         self.params = {
             "branch_names": [registry.default_branch, GLOBAL_BRANCH_NAME],
             "kinds_to_skip": self.kinds_to_skip,
+            "kinds_to_include": self.kinds_to_include,
         }
         query = """
         MATCH (n:Node)-[e:IS_PART_OF]->(:Root)
         WHERE NOT n.kind IN $kinds_to_skip
+        AND ($kinds_to_include IS NULL OR n.kind IN $kinds_to_include)
         AND e.branch IN $branch_names
         AND e.status = "active"
         AND e.to IS NULL
@@ -731,6 +736,10 @@ class Migration044(MigrationRequiringRebase):
                 continue

             node_schema = main_schema_branch.get_node(name=node_schema_name, duplicate=False)
+
+            if node_schema.branch is not BranchSupportType.AWARE:
+                continue
+
             attribute_schema_map = {}
             if node_schema.display_labels:
                 attribute_schema_map[display_labels_attribute_schema] = display_label_attribute_schema
@@ -826,6 +835,8 @@ class Migration044(MigrationRequiringRebase):
                 continue

             node_schema = schema_branch.get_node(name=node_schema_name, duplicate=False)
+            if node_schema.branch not in (BranchSupportType.AWARE, BranchSupportType.LOCAL):
+                continue
             try:
                 default_node_schema = main_schema_branch.get_node(name=node_schema_name, duplicate=False)
             except SchemaNotFoundError:
@@ -0,0 +1,236 @@
+from __future__ import annotations
+
+from itertools import chain
+from typing import TYPE_CHECKING, Any
+
+import ujson
+from rich.progress import Progress, TaskID
+
+from infrahub.core.branch import Branch
+from infrahub.core.constants import GLOBAL_BRANCH_NAME, BranchSupportType, SchemaPathType
+from infrahub.core.initialization import get_root_node
+from infrahub.core.migrations.graph.m044_backfill_hfid_display_label_in_db import (
+    DefaultBranchNodeCount,
+    GetPathDetailsDefaultBranch,
+    GetResultMapQuery,
+    UpdateAttributeValuesQuery,
+)
+from infrahub.core.migrations.schema.node_attribute_add import NodeAttributeAddMigration
+from infrahub.core.migrations.shared import ArbitraryMigration, MigrationResult, get_migration_console
+from infrahub.core.path import SchemaPath
+from infrahub.core.query import Query, QueryType
+
+from .load_schema_branch import get_or_load_schema_branch
+
+if TYPE_CHECKING:
+    from infrahub.core.schema import AttributeSchema, MainSchemaTypes, NodeSchema, SchemaAttributePath
+    from infrahub.core.schema.schema_branch import SchemaBranch
+    from infrahub.database import InfrahubDatabase
+
+
+class DeleteBranchAwareAttrsForBranchAgnosticNodesQuery(Query):
+    name = "delete_branch_aware_attrs_for_branch_agnostic_nodes_query"
+    type = QueryType.WRITE
+    insert_return = False
+    raise_error_if_empty = False
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        query = """
+        MATCH (n:Node {branch_support: "agnostic"})
+        MATCH (n)-[:HAS_ATTRIBUTE]->(attr:Attribute)
+        WHERE attr.name IN ["human_friendly_id", "display_label"]
+        WITH DISTINCT attr
+        CALL (attr) {
+            DETACH DELETE attr
+        } IN TRANSACTIONS
+        """
+        self.add_to_query(query)
+
+
+class Migration046(ArbitraryMigration):
+    """
+    Delete any branch-aware human_friendly_id and display_label attributes added to branch-agnostic nodes
+    Add human_friendly_id and display_label attributes to branch-agnostic nodes
+    Set human_friendly_id and display_label attributes for branch-agnostic nodes on the global branch
+
+    Uses and duplicates code from Migration044
+    """
+
+    name: str = "046_fill_agnostic_hfid_display_labels"
+    minimum_version: int = 45
+    update_batch_size: int = 1000
+
+    async def _do_one_schema_all(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        schema: MainSchemaTypes,
+        schema_branch: SchemaBranch,
+        attribute_schema_map: dict[AttributeSchema, AttributeSchema],
+        progress: Progress | None = None,
+        update_task: TaskID | None = None,
+    ) -> None:
+        print(f"Processing {schema.kind}...", end="")
+
+        schema_paths_by_name: dict[str, list[SchemaAttributePath]] = {}
+        for source_attribute_schema in attribute_schema_map.keys():
+            node_schema_property = getattr(schema, source_attribute_schema.name)
+            if not node_schema_property:
+                continue
+            if isinstance(node_schema_property, list):
+                schema_paths_by_name[source_attribute_schema.name] = [
+                    schema.parse_schema_path(path=str(path), schema=schema_branch) for path in node_schema_property
+                ]
+            else:
+                schema_paths_by_name[source_attribute_schema.name] = [
+                    schema.parse_schema_path(path=str(node_schema_property), schema=schema_branch)
+                ]
+        all_schema_paths = list(chain(*schema_paths_by_name.values()))
+        offset = 0
+
+        # loop until we get no results from the get_details_query
+        while True:
+            get_details_query: GetResultMapQuery = await GetPathDetailsDefaultBranch.init(
+                db=db,
+                schema_kind=schema.kind,
+                schema_paths=all_schema_paths,
+                offset=offset,
+                limit=self.update_batch_size,
+            )
+            await get_details_query.execute(db=db)
+
+            num_updates = 0
+            for source_attribute_schema, destination_attribute_schema in attribute_schema_map.items():
+                schema_paths = schema_paths_by_name[source_attribute_schema.name]
+                schema_path_values_map = get_details_query.get_result_map(schema_paths)
+                num_updates = max(num_updates, len(schema_path_values_map))
+                formatted_schema_path_values_map = {}
+                for k, v in schema_path_values_map.items():
+                    if not v:
+                        continue
+                    if destination_attribute_schema.kind == "List":
+                        formatted_schema_path_values_map[k] = ujson.dumps(v)
+                    else:
+                        formatted_schema_path_values_map[k] = " ".join(item for item in v if item is not None)
+
+                if not formatted_schema_path_values_map:
+                    continue
+
+                update_display_label_query = await UpdateAttributeValuesQuery.init(
+                    db=db,
+                    branch=branch,
+                    attribute_schema=destination_attribute_schema,
+                    values_by_id_map=formatted_schema_path_values_map,
+                )
+                await update_display_label_query.execute(db=db)
+
+            if progress is not None and update_task is not None:
+                progress.update(update_task, advance=num_updates)
+
+            if num_updates == 0:
+                break
+
+            offset += self.update_batch_size
+
+        print("done")
+
+    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+        try:
+            return await self._do_execute(db=db)
+        except Exception as exc:
+            return MigrationResult(errors=[str(exc)])
+
+    async def _do_execute(self, db: InfrahubDatabase) -> MigrationResult:
+        console = get_migration_console()
+        result = MigrationResult()
+
+        root_node = await get_root_node(db=db, initialize=False)
+        default_branch_name = root_node.default_branch
+        default_branch = await Branch.get_by_name(db=db, name=default_branch_name)
+        main_schema_branch = await get_or_load_schema_branch(db=db, branch=default_branch)
+
+        console.print("Deleting branch-aware attributes for branch-agnostic nodes...", end="")
+        delete_query = await DeleteBranchAwareAttrsForBranchAgnosticNodesQuery.init(db=db)
+        await delete_query.execute(db=db)
+        console.print("done")
+
+        branch_agnostic_schemas: list[NodeSchema] = []
+        migrations = []
+        for node_schema_kind in main_schema_branch.node_names:
+            schema = main_schema_branch.get_node(name=node_schema_kind, duplicate=False)
+            if schema.branch is not BranchSupportType.AGNOSTIC:
+                continue
+            branch_agnostic_schemas.append(schema)
+            migrations.extend(
+                [
+                    NodeAttributeAddMigration(
+                        new_node_schema=schema,
+                        previous_node_schema=schema,
+                        schema_path=SchemaPath(
+                            schema_kind=schema.kind, path_type=SchemaPathType.ATTRIBUTE, field_name="human_friendly_id"
+                        ),
+                    ),
+                    NodeAttributeAddMigration(
+                        new_node_schema=schema,
+                        previous_node_schema=schema,
+                        schema_path=SchemaPath(
+                            schema_kind=schema.kind, path_type=SchemaPathType.ATTRIBUTE, field_name="display_label"
+                        ),
+                    ),
+                ]
+            )
+
+        global_branch = await Branch.get_by_name(db=db, name=GLOBAL_BRANCH_NAME)
+        with Progress(console=console) as progress:
+            update_task = progress.add_task(
+                "Adding HFID and display label attributes to branch-agnostic nodes", total=len(migrations)
+            )
+
+            for migration in migrations:
+                try:
+                    execution_result = await migration.execute(db=db, branch=global_branch)
+                    result.errors.extend(execution_result.errors)
+                    progress.update(update_task, advance=1)
+                except Exception as exc:
+                    result.errors.append(str(exc))
+                    return result
+
+        total_nodes_query = await DefaultBranchNodeCount.init(
+            db=db, kinds_to_include=[sch.kind for sch in branch_agnostic_schemas]
+        )
+        await total_nodes_query.execute(db=db)
+        total_nodes_count = total_nodes_query.get_num_nodes()
+
+        base_node_schema = main_schema_branch.get("SchemaNode", duplicate=False)
+        display_label_attribute_schema = base_node_schema.get_attribute("display_label")
+        display_labels_attribute_schema = base_node_schema.get_attribute("display_labels")
+        hfid_attribute_schema = base_node_schema.get_attribute("human_friendly_id")
+
+        with Progress(console=console) as progress:
+            update_task = progress.add_task(
+                f"Set display_label and human_friendly_id for {total_nodes_count} branch-agnostic nodes on global branch",
+                total=total_nodes_count,
+            )
+            for branch_agnostic_schema in branch_agnostic_schemas:
+                attribute_schema_map = {}
+                if branch_agnostic_schema.display_labels:
+                    attribute_schema_map[display_labels_attribute_schema] = display_label_attribute_schema
+                if branch_agnostic_schema.human_friendly_id:
+                    attribute_schema_map[hfid_attribute_schema] = hfid_attribute_schema
+                if not attribute_schema_map:
+                    continue
+
+                await self._do_one_schema_all(
+                    db=db,
+                    branch=global_branch,
+                    schema=branch_agnostic_schema,
+                    schema_branch=main_schema_branch,
+                    attribute_schema_map=attribute_schema_map,
+                    progress=progress,
+                    update_task=update_task,
+                )
+
+        return result
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
+        return MigrationResult()
@@ -1088,7 +1088,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):

         if key in self._relationships:
             rel: RelationshipManager = getattr(self, key)
-            changed |= await rel.update(db=db, data=value)
+            changed |= await rel.update(db=db, data=value, process_delete=process_pools)

         return changed

@@ -38,7 +38,7 @@ class AttributeQuery(Query):
         if at:
             self.at = Timestamp(at)
         else:
-            self.at = self.attr.at
+            self.at = Timestamp()

         self.branch = branch or self.attr.get_branch_based_on_support_type()

@@ -247,10 +247,9 @@ class AttributeGetQuery(AttributeQuery):
         self.params["attr_uuid"] = self.attr.id
         self.params["node_uuid"] = self.attr.node.id

-        at = self.at or self.attr.at
-        self.params["at"] = at.to_string()
+        self.params["at"] = self.at.to_string()

-        rels_filter, rels_params = self.branch.get_query_filter_path(at=at.to_string())
+        rels_filter, rels_params = self.branch.get_query_filter_path(at=self.at.to_string())
         self.params.update(rels_params)

         query = (
@@ -1061,7 +1061,12 @@ class RelationshipManager:

         return self._relationships.as_list()

-    async def update(self, data: list[str | Node] | dict[str, Any] | str | Node | None, db: InfrahubDatabase) -> bool:
+    async def update(
+        self,
+        data: list[str | Node] | dict[str, Any] | str | Node | None,
+        db: InfrahubDatabase,
+        process_delete: bool = True,
+    ) -> bool:
         """Replace and Update the list of relationships with this one."""
         if not isinstance(data, list):
             list_data: Sequence[str | Node | dict[str, Any] | None] = [data]
@@ -1087,8 +1092,9 @@ class RelationshipManager:

             if item is None:
                 if previous_relationships:
-                    for rel in previous_relationships.values():
-                        await rel.delete(db=db)
+                    if process_delete:
+                        for rel in previous_relationships.values():
+                            await rel.delete(db=db)
                 changed = True
                 continue

@@ -1,7 +1,5 @@
 from __future__ import annotations

-from typing import cast
-
 from infrahub_sdk.exceptions import URLNotFoundError
 from infrahub_sdk.template import Jinja2Template
 from prefect import flow
@@ -139,11 +137,32 @@ async def display_labels_setup_jinja2(
     ) # type: ignore[misc]

     # Configure all DisplayLabelTriggerDefinitions in Prefect
-    display_reports = [cast(DisplayLabelTriggerDefinition, entry) for entry in report.updated + report.created]
-    direct_target_triggers = [display_report for display_report in display_reports if display_report.target_kind]
+    all_triggers = report.triggers_with_type(trigger_type=DisplayLabelTriggerDefinition)
+    direct_target_triggers = [
+        display_report
+        for display_report in report.modified_triggers_with_type(trigger_type=DisplayLabelTriggerDefinition)
+        if display_report.target_kind
+    ]

     for display_report in direct_target_triggers:
         if event_name != BranchDeletedEvent.event_name and display_report.branch == branch_name:
+            if branch_name != registry.default_branch:
+                default_branch_triggers = [
+                    trigger
+                    for trigger in all_triggers
+                    if trigger.branch == registry.default_branch
+                    and trigger.target_kind == display_report.target_kind
+                ]
+                if (
+                    default_branch_triggers
+                    and len(default_branch_triggers) == 1
+                    and default_branch_triggers[0].template_hash == display_report.template_hash
+                ):
+                    log.debug(
+                        f"Skipping display label updates for {display_report.target_kind} [{branch_name}], schema is identical to default branch"
+                    )
+                    continue
+
             await get_workflow().submit_workflow(
                 workflow=TRIGGER_UPDATE_DISPLAY_LABELS,
                 context=context,
infrahub/hfid/tasks.py CHANGED
@@ -1,7 +1,5 @@
 from __future__ import annotations

-from typing import cast
-
 from infrahub_sdk.exceptions import URLNotFoundError
 from prefect import flow
 from prefect.logging import get_run_logger
@@ -138,11 +136,32 @@ async def hfid_setup(context: InfrahubContext, branch_name: str | None = None, e
     ) # type: ignore[misc]

     # Configure all DisplayLabelTriggerDefinitions in Prefect
-    hfid_reports = [cast(HFIDTriggerDefinition, entry) for entry in report.updated + report.created]
-    direct_target_triggers = [hfid_report for hfid_report in hfid_reports if hfid_report.target_kind]
+    all_triggers = report.triggers_with_type(trigger_type=HFIDTriggerDefinition)
+    direct_target_triggers = [
+        hfid_report
+        for hfid_report in report.modified_triggers_with_type(trigger_type=HFIDTriggerDefinition)
+        if hfid_report.target_kind
+    ]

     for display_report in direct_target_triggers:
         if event_name != BranchDeletedEvent.event_name and display_report.branch == branch_name:
+            if branch_name != registry.default_branch:
+                default_branch_triggers = [
+                    trigger
+                    for trigger in all_triggers
+                    if trigger.branch == registry.default_branch
+                    and trigger.target_kind == display_report.target_kind
+                ]
+                if (
+                    default_branch_triggers
+                    and len(default_branch_triggers) == 1
+                    and default_branch_triggers[0].hfid_hash == display_report.hfid_hash
+                ):
+                    log.debug(
+                        f"Skipping HFID updates for {display_report.target_kind} [{branch_name}], schema is identical to default branch"
+                    )
+                    continue
+
             await get_workflow().submit_workflow(
                 workflow=TRIGGER_UPDATE_HFID,
                 context=context,
@@ -1,5 +1,6 @@
 import asyncio
-import uuid
+import hashlib
+import json
 from datetime import datetime, timedelta, timezone
 from typing import Any
 from uuid import UUID
@@ -27,11 +28,14 @@ from prefect.client.schemas.sorting import (
     FlowRunSort,
 )

+from infrahub import config
 from infrahub.core.constants import TaskConclusion
 from infrahub.core.query.node import NodeGetKindQuery
 from infrahub.database import InfrahubDatabase
 from infrahub.log import get_logger
+from infrahub.message_bus.types import KVTTL
 from infrahub.utils import get_nested_dict
+from infrahub.workers.dependencies import get_cache
 from infrahub.workflows.constants import TAG_NAMESPACE, WorkflowTag

 from .constants import CONCLUSION_STATE_MAPPING
@@ -44,6 +48,12 @@ PREFECT_MAX_LOGS_PER_CALL = 200


 class PrefectTask:
+    @staticmethod
+    def _build_flow_run_count_cache_key(body: dict[str, Any]) -> str:
+        serialized = json.dumps(body, sort_keys=True, separators=(",", ":"))
+        hashed = hashlib.sha256(serialized.encode()).hexdigest()
+        return f"task_manager:flow_run_count:{hashed}"
+
     @classmethod
     async def count_flow_runs(
         cls,
@@ -59,10 +69,24 @@ class PrefectTask:
             "flows": flow_filter.model_dump(mode="json") if flow_filter else None,
             "flow_runs": (flow_run_filter.model_dump(mode="json", exclude_unset=True) if flow_run_filter else None),
         }
+        cache_key = cls._build_flow_run_count_cache_key(body)
+
+        cache = await get_cache()
+        cached_value_raw = await cache.get(key=cache_key)
+        if cached_value_raw is not None:
+            try:
+                return int(cached_value_raw)
+            except (TypeError, ValueError):
+                await cache.delete(key=cache_key)

         response = await client._client.post("/flow_runs/count", json=body)
         response.raise_for_status()
-        return response.json()
+        count_value = int(response.json())
+
+        if count_value >= config.SETTINGS.workflow.flow_run_count_cache_threshold:
+            await cache.set(key=cache_key, value=str(count_value), expires=KVTTL.ONE_MINUTE)
+
+        return count_value

     @classmethod
     async def _get_related_nodes(cls, db: InfrahubDatabase, flows: list[FlowRun]) -> RelatedNodesInfo:
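The cache key above is a SHA-256 digest of the canonically serialized filter body, so two calls with equivalent filters share one cache entry. A quick standalone check of that property (plain stdlib, no Infrahub imports):

```python
import hashlib
import json


def build_key(body: dict) -> str:
    # sort_keys plus compact separators make the serialization canonical, so dicts
    # with the same content hash identically regardless of key insertion order.
    serialized = json.dumps(body, sort_keys=True, separators=(",", ":"))
    return "task_manager:flow_run_count:" + hashlib.sha256(serialized.encode()).hexdigest()


a = build_key({"flows": None, "flow_runs": {"tags": ["infrahub"]}})
b = build_key({"flow_runs": {"tags": ["infrahub"]}, "flows": None})
assert a == b  # key order does not change the cache key
```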
@@ -204,7 +228,7 @@ class PrefectTask:
             tags=FlowRunFilterTags(all_=filter_tags),
         )
         if ids:
-            flow_run_filter.id = FlowRunFilterId(any_=[uuid.UUID(id) for id in ids])
+            flow_run_filter.id = FlowRunFilterId(any_=[UUID(id) for id in ids])

         if statuses:
             flow_run_filter.state = FlowRunFilterState(type=FlowRunFilterStateType(any_=statuses))
@@ -8,6 +8,7 @@ from prefect.client.schemas.objects import WorkerStatus
 from infrahub.events.utils import get_all_events
 from infrahub.trigger.constants import NAME_SEPARATOR
 from infrahub.trigger.models import TriggerType
+from infrahub.trigger.setup import gather_all_automations

 from .models import TelemetryPrefectData, TelemetryWorkPoolData

@@ -53,7 +54,7 @@ async def gather_prefect_events(client: PrefectClient) -> dict[str, Any]:

 @task(name="telemetry-gather-automations", task_run_name="Gather Automations", cache_policy=NONE)
 async def gather_prefect_automations(client: PrefectClient) -> dict[str, Any]:
-    automations = await client.read_automations()
+    automations = await gather_all_automations(client=client)

     data: dict[str, Any] = {}

@@ -1,8 +1,8 @@
 from __future__ import annotations

 from datetime import timedelta
-from enum import Enum
-from typing import TYPE_CHECKING, Any
+from enum import Enum, StrEnum
+from typing import TYPE_CHECKING, Any, TypeVar

 from prefect.events.actions import RunDeployment
 from prefect.events.schemas.automations import Automation, Posture
@@ -18,16 +18,78 @@ from .constants import NAME_SEPARATOR
 if TYPE_CHECKING:
     from uuid import UUID

+T = TypeVar("T", bound="TriggerDefinition")
+
+
+class TriggerComparison(StrEnum):
+    MATCH = "match"  # The expected trigger and the actual trigger are identical
+    REFRESH = "refresh"  # The branch parameters don't match but the hash does; refresh in Prefect but don't run triggers
+    UPDATE = "update"  # Neither the branch nor the other data points match; update in Prefect and run triggers
+
+    @property
+    def update_prefect(self) -> bool:
+        return self in {TriggerComparison.REFRESH, TriggerComparison.UPDATE}
+

 class TriggerSetupReport(BaseModel):
     created: list[TriggerDefinition] = Field(default_factory=list)
+    refreshed: list[TriggerDefinition] = Field(default_factory=list)
     updated: list[TriggerDefinition] = Field(default_factory=list)
     deleted: list[Automation] = Field(default_factory=list)
     unchanged: list[TriggerDefinition] = Field(default_factory=list)

     @property
     def in_use_count(self) -> int:
-        return len(self.created + self.updated + self.unchanged)
+        return len(self.created + self.updated + self.unchanged + self.refreshed)
+
+    def add_with_comparison(self, trigger: TriggerDefinition, comparison: TriggerComparison) -> None:
+        match comparison:
+            case TriggerComparison.UPDATE:
+                self.updated.append(trigger)
+            case TriggerComparison.REFRESH:
+                self.refreshed.append(trigger)
+            case TriggerComparison.MATCH:
+                self.unchanged.append(trigger)
+
+    def _created_triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        return [trigger for trigger in self.created if isinstance(trigger, trigger_type)]
+
+    def _refreshed_triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        return [trigger for trigger in self.refreshed if isinstance(trigger, trigger_type)]
+
+    def _unchanged_triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        return [trigger for trigger in self.unchanged if isinstance(trigger, trigger_type)]
+
+    def _updated_triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        return [trigger for trigger in self.updated if isinstance(trigger, trigger_type)]
+
+    def triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        """Return all triggers that match the specified type.
+
+        Args:
+            trigger_type: A TriggerDefinition class or subclass to filter by
+
+        Returns:
+            List of triggers of the specified type from all categories
+        """
+        created = self._created_triggers_with_type(trigger_type=trigger_type)
+        updated = self._updated_triggers_with_type(trigger_type=trigger_type)
+        refreshed = self._refreshed_triggers_with_type(trigger_type=trigger_type)
+        unchanged = self._unchanged_triggers_with_type(trigger_type=trigger_type)
+        return created + updated + refreshed + unchanged
+
+    def modified_triggers_with_type(self, trigger_type: type[T]) -> list[T]:
+        """Return all created and updated triggers that match the specified type.
+
+        Args:
+            trigger_type: A TriggerDefinition class or subclass to filter by
+
+        Returns:
+            List of triggers of the specified type from both the created and updated lists
+        """
+        created = self._created_triggers_with_type(trigger_type=trigger_type)
+        updated = self._updated_triggers_with_type(trigger_type=trigger_type)
+        return created + updated


 class TriggerType(str, Enum):
@@ -41,6 +103,16 @@ class TriggerType(str, Enum):
     HUMAN_FRIENDLY_ID = "human_friendly_id"
     # OBJECT = "object"

+    @property
+    def is_branch_specific(self) -> bool:
+        return self in {
+            TriggerType.COMPUTED_ATTR_JINJA2,
+            TriggerType.COMPUTED_ATTR_PYTHON,
+            TriggerType.COMPUTED_ATTR_PYTHON_QUERY,
+            TriggerType.DISPLAY_LABEL_JINJA2,
+            TriggerType.HUMAN_FRIENDLY_ID,
+        }
+

 def _match_related_dict() -> dict:
     # Make Mypy happy as match related is a dict[str, Any] | list[dict[str, Any]]
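The practical effect of the three-way comparison: REFRESH pushes the automation to Prefect but keeps the trigger out of `modified_triggers_with_type`, so no downstream update workflows are re-run. A condensed sketch of that routing (report simplified to a plain dict, not the Pydantic model):

```python
from enum import StrEnum


class TriggerComparison(StrEnum):
    MATCH = "match"
    REFRESH = "refresh"
    UPDATE = "update"

    @property
    def update_prefect(self) -> bool:
        # REFRESH and UPDATE both push changes to Prefect; only MATCH is a no-op.
        return self in {TriggerComparison.REFRESH, TriggerComparison.UPDATE}


report: dict[str, list[str]] = {"created": [], "updated": [], "refreshed": [], "unchanged": []}


def add_with_comparison(trigger: str, comparison: TriggerComparison) -> None:
    bucket = {"update": "updated", "refresh": "refreshed", "match": "unchanged"}[comparison.value]
    report[bucket].append(trigger)


add_with_comparison("computed_attr_jinja2::fqdn::kind::InfraDevice", TriggerComparison.REFRESH)

# "Modified" means created or updated only, so a refreshed trigger is pushed to
# Prefect (update_prefect is True) without re-running downstream workflows.
modified = report["created"] + report["updated"]
assert TriggerComparison.REFRESH.update_prefect and modified == []
```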
infrahub/trigger/setup.py CHANGED
@@ -12,22 +12,36 @@ from infrahub import lock
 from infrahub.database import InfrahubDatabase
 from infrahub.trigger.models import TriggerDefinition

-from .models import TriggerSetupReport, TriggerType
+from .models import TriggerComparison, TriggerSetupReport, TriggerType

 if TYPE_CHECKING:
     from uuid import UUID


-def compare_automations(target: AutomationCore, existing: Automation) -> bool:
-    """Compare an AutomationCore with an existing Automation object to identify if they are identical or not
-
-    Return True if the target is identical to the existing automation
+def compare_automations(
+    target: AutomationCore, existing: Automation, trigger_type: TriggerType | None, force_update: bool = False
+) -> TriggerComparison:
+    """Compare an AutomationCore with an existing Automation object to identify whether they are identical,
+    whether it's a branch-specific automation where only the branch filter differs, or whether they are different.
     """

+    if force_update:
+        return TriggerComparison.UPDATE
+
     target_dump = target.model_dump(exclude_defaults=True, exclude_none=True)
     existing_dump = existing.model_dump(exclude_defaults=True, exclude_none=True, exclude={"id"})

-    return target_dump == existing_dump
+    if target_dump == existing_dump:
+        return TriggerComparison.MATCH
+
+    if not trigger_type or not trigger_type.is_branch_specific:
+        return TriggerComparison.UPDATE
+
+    if target.description == existing.description:
+        # If only the branch-related info is different, we consider it a refresh
+        return TriggerComparison.REFRESH
+
+    return TriggerComparison.UPDATE


 @task(name="trigger-setup-specific", task_run_name="Setup triggers of a specific kind", cache_policy=NONE)  # type: ignore[arg-type]
@@ -63,10 +77,8 @@ async def setup_triggers(

     report = TriggerSetupReport()

-    if trigger_type:
-        log.debug(f"Setting up triggers of type {trigger_type.value}")
-    else:
-        log.debug("Setting up all triggers")
+    trigger_log_message = f"triggers of type {trigger_type.value}" if trigger_type else "all triggers"
+    log.debug(f"Setting up {trigger_log_message}")

     # -------------------------------------------------------------
     # Retrieve existing Deployments and Automation from the server
@@ -80,16 +92,14 @@ async def setup_triggers(
     }
     deployments_mapping: dict[str, UUID] = {name: item.id for name, item in deployments.items()}

-    # If a trigger type is provided, narrow down the list of existing triggers to know which one to delete
-    existing_automations: dict[str, Automation] = {}
+    existing_automations = {item.name: item for item in await gather_all_automations(client=client)}
     if trigger_type:
+        # If a trigger type is provided, narrow down the list of existing triggers to know which ones to delete
         existing_automations = {
-            item.name: item
-            for item in await client.read_automations()
-            if item.name.startswith(f"{trigger_type.value}::")
+            automation_name: automation
+            for automation_name, automation in existing_automations.items()
+            if automation_name.startswith(f"{trigger_type.value}::")
         }
-    else:
-        existing_automations = {item.name: item for item in await client.read_automations()}

     trigger_names = [trigger.generate_name() for trigger in triggers]
     automation_names = list(existing_automations.keys())
@@ -115,12 +125,13 @@ async def setup_triggers(
         existing_automation = existing_automations.get(trigger.generate_name(), None)

         if existing_automation:
-            if force_update or not compare_automations(target=automation, existing=existing_automation):
+            trigger_comparison = compare_automations(
+                target=automation, existing=existing_automation, trigger_type=trigger_type, force_update=force_update
+            )
+            if trigger_comparison.update_prefect:
                 await client.update_automation(automation_id=existing_automation.id, automation=automation)
                 log.info(f"{trigger.generate_name()} Updated")
-                report.updated.append(trigger)
-            else:
-                report.unchanged.append(trigger)
+            report.add_with_comparison(trigger, trigger_comparison)
         else:
             await client.create_automation(automation=automation)
             log.info(f"{trigger.generate_name()} Created")
@@ -145,15 +156,34 @@ async def setup_triggers(
     else:
         raise

-    if trigger_type:
-        log.info(
-            f"Processed triggers of type {trigger_type.value}: "
-            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
-        )
-    else:
-        log.info(
-            f"Processed all triggers: "
-            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
-        )
+    log.info(
+        f"Processed {trigger_log_message}: {len(report.created)} created, {len(report.updated)} updated, "
+        f"{len(report.refreshed)} refreshed, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
+    )

     return report
+
+
+async def gather_all_automations(client: PrefectClient) -> list[Automation]:
+    """Gather all automations from the Prefect server.
+
+    By default the Prefect client only retrieves a limited number of automations; this function
+    retrieves them all by paginating through the results. The default within Prefect is 200 items,
+    and client.read_automations() doesn't support pagination parameters.
+    """
+    automation_count_response = await client.request("POST", "/automations/count")
+    automation_count_response.raise_for_status()
+    automation_count: int = automation_count_response.json()
+    offset = 0
+    limit = 200
+    missing_automations = True
+    automations: list[Automation] = []
+    while missing_automations:
+        response = await client.request("POST", "/automations/filter", json={"limit": limit, "offset": offset})
+        response.raise_for_status()
+        automations.extend(Automation.model_validate_list(response.json()))
+        if len(automations) >= automation_count:
+            missing_automations = False
+        offset += limit
+
+    return automations
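`gather_all_automations` works around the 200-item page size with plain offset pagination. A self-contained sketch of the same loop against a stubbed endpoint (fake in-memory data, not Prefect's API):

```python
def fetch_page(offset: int, limit: int) -> list[int]:
    # Stand-in for POST /automations/filter: returns one page of fake records.
    data = list(range(450))  # pretend the server holds 450 automations
    return data[offset : offset + limit]


def gather_all(total: int, limit: int = 200) -> list[int]:
    items: list[int] = []
    offset = 0
    while len(items) < total:
        page = fetch_page(offset=offset, limit=limit)
        if not page:  # safety stop if the server under-reports pages
            break
        items.extend(page)
        offset += limit
    return items


assert len(gather_all(total=450)) == 450  # three requests: 200 + 200 + 50
```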
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: infrahub-server
-Version: 1.5.2
+Version: 1.5.4
 Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
 License: Apache-2.0
 Author: OpsMill
@@ -54,20 +54,20 @@ infrahub/components.py,sha256=lSLDCDwIZoakZ2iBrfHi9c3BxzugMiuiZO6V7Egt6tk,107
 infrahub/computed_attribute/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/computed_attribute/constants.py,sha256=oTMPEfRuf2mcfCkBpRLWRALO6nsLHpFm9jJGu0lowS4,446
 infrahub/computed_attribute/gather.py,sha256=xhH4dsgCjRFyFshns4Iu3sloe5m1bVMRdeQAJjFdyYU,8220
-infrahub/computed_attribute/models.py,sha256=P_MijLwCVd7394oyTTfYQ3HmX5wIF966jdchuZaLRbs,17361
-infrahub/computed_attribute/tasks.py,sha256=3HGVb6mat9kD5VFYBtdUhJx6KW_cRlh2hbgHDOVAwFo,17944
+infrahub/computed_attribute/models.py,sha256=U2zM2cGpGWAmRpVEp28hFauCNQYaDdI332thj_65zvE,17658
+infrahub/computed_attribute/tasks.py,sha256=zld2oDQ5Z4HZzHTDrCYNLOxzCW8SqcLTvvxtSk6TADM,19388
 infrahub/computed_attribute/triggers.py,sha256=ve1cUj0CZ7dU1VtZkxET9LD8StszKIL9mCkTZpCeUaI,2304
-infrahub/config.py,sha256=jWGDVIwZiRdn7TRw0LwcFD61zr5T0mUacQ9C2MYLoLg,39188
+infrahub/config.py,sha256=PX6RlgWJhShbl0vnRrw8MVitjWcjCYn764VtHfHvfmk,39389
 infrahub/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/constants/database.py,sha256=WmV1iuOk4xulxZHOVvO3sS_VF1eTf7fKh0TPe_RnfV4,507
 infrahub/constants/environment.py,sha256=ry-6qsBzSumOjjiq1D3XNoquf1LWqFKiQSJj8t6nET4,32
 infrahub/context.py,sha256=8SZRKSECkkcsNNzDaKEUJ7Nyr0EzUfToAy969LXjQVk,1554
 infrahub/core/__init__.py,sha256=z6EJBZyCYCBqinoBtX9li6BTBbbGV8WCkE_4CrEsmDA,104
 infrahub/core/account.py,sha256=6f1cIDWvL-HsbzL0UwWoCbDTzx55wzd_SkpQXiKDjcE,27477
-infrahub/core/attribute.py,sha256=4_FvQI5EgCmOGKb8_N0c7k2kV1EbXwLqRgFsvOtQbVw,44907
+infrahub/core/attribute.py,sha256=quW5lG-e4c3VjIhiSta12TUTB8Uv_Gmi8mwTenwBSIo,44921
 infrahub/core/branch/__init__.py,sha256=h0oIj0gHp1xI-N1cYW8_N6VZ81CBOmLuiUt5cS5nKuk,49
 infrahub/core/branch/enums.py,sha256=wE_TvKxd-r3zeHgLOMuZhsyKRwDWWC8BMxAEC_aX7A8,212
-infrahub/core/branch/models.py,sha256=wgTeFJCcrgW7soZrfJWhGaJg2jDmTnsAjabpjf0-KWI,20617
+infrahub/core/branch/models.py,sha256=BgEHOXr0bpOfhIV0-v7OJR0UVtHUFQXjn3giAEHD6Vw,20674
 infrahub/core/branch/needs_rebase_status.py,sha256=purrg93k9zWcV9NONjIdMF8cWLXEKHq6YjO0ayC3C04,407
 infrahub/core/branch/tasks.py,sha256=RnQlWdFmAj3VIALb4WNaMiL7ZuvtGcW1zgdbnemjE-Q,23558
 infrahub/core/changelog/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -143,7 +143,7 @@ infrahub/core/diff/repository/deserializer.py,sha256=bhN9ao8HxqKyRz273QGLNV9z9_S
 infrahub/core/diff/repository/repository.py,sha256=u0QTMY1e2dknG_DuRAwzFt-Lp1_mdj5lqF2ymt77k9E,25581
 infrahub/core/diff/tasks.py,sha256=lQMTcqvVC1p5hc3KmwR2ONA1M6qffDUl62vKOktTs5c,3514
 infrahub/core/enums.py,sha256=qGbhRVoH43Xi0iDkUfWdQiKapJbLT9UKsCobFk_paIk,491
-infrahub/core/graph/__init__.py,sha256=KNVHBhDx3HVtzNMVi-7A8m-Cp_rbi61MeY-Ag62bZ78,19
+infrahub/core/graph/__init__.py,sha256=zt3OOi7yTehM66ClW2czqpghOQrlZ2Tzgw7hRet4-LM,19
 infrahub/core/graph/constraints.py,sha256=lmuzrKDFoeSKRiLtycB9PXi6zhMYghczKrPYvfWyy90,10396
 infrahub/core/graph/index.py,sha256=A9jzEE_wldBJsEsflODeMt4GM8sPmmbHAJRNdFioR1k,1736
 infrahub/core/graph/schema.py,sha256=o50Jcy6GBRk55RkDJSMIDDwHhLD7y_RWOirI9rCex4A,10776
@@ -163,7 +163,7 @@ infrahub/core/manager.py,sha256=IU5VfkHLCN4WhvqzI6DOEyCXo9fBT_st5BmN__A4vas,4418
 infrahub/core/merge.py,sha256=TNZpxjNYcl3dnvE8eYXaWSXFDYeEa8DDsS9XbR2XKlA,11217
 infrahub/core/migrations/__init__.py,sha256=ttA1YkKhiG9Zc0fwIIcMLIDCrIhN5xVOIh6ojFoy58o,1541
 infrahub/core/migrations/exceptions.py,sha256=gEAkWzjvN-IXr0YPqUrEqnu_GsR-uqucsu1QUaWhEmM,147
-infrahub/core/migrations/graph/__init__.py,sha256=epa6L0I-ZMzHS2NkpTusTl_1LEDjGNRq7q_S_HukqqQ,4475
+infrahub/core/migrations/graph/__init__.py,sha256=ShWFHlPSkyrkOORcFNn907I9-I4UM2n4GBktPPsHZx8,4558
 infrahub/core/migrations/graph/load_schema_branch.py,sha256=OvjowaeDnx4djD1aGPjE7Rqyh1p843LSodOf_Frdt9U,1008
 infrahub/core/migrations/graph/m001_add_version_to_graph.py,sha256=YcLN6cFjE6IGheXR4Ujb6CcyY8bJ7WE289hcKJaENOc,1515
 infrahub/core/migrations/graph/m002_attribute_is_default.py,sha256=wB6f2N_ChTvGajqHD-OWCG5ahRMDhhXZuwo79ieq_II,1036
@@ -207,9 +207,10 @@ infrahub/core/migrations/graph/m039_ipam_reconcile.py,sha256=yRWTE73Rq1Qh6tPLu98
 infrahub/core/migrations/graph/m040_duplicated_attributes.py,sha256=2LxsG-CfcZnBirwGhwYL4kU-g3oxl6lNSM12vZTZ7Gw,2930
 infrahub/core/migrations/graph/m041_deleted_dup_edges.py,sha256=eP2BqUfvwkjACJrKI5fVyBBmXxEDwxtAD9O_CcbwBMw,5409
 infrahub/core/migrations/graph/m042_profile_attrs_in_db.py,sha256=KdEaNPHovJxxiNL3CFRjWBnNzaMdidj1nmW5Jbhrt-4,6431
-infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py,sha256=x7OgI5ZoX8yslg329VNFHXbZ2eDFwcdFhBeC-UWqVdk,7072
-infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py,sha256=mzw_sitDDDNQRMurvMo3mprUqhXNLJVh6Dgabh67cTw,39096
+infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py,sha256=giICX6Dwd_z1GaWTWyX3KD5Va34LGth0Vv2uyaof044,7290
+infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py,sha256=FuV3EBWS4lY4dtOn7-1Qgo6J7j4XDq4s5yIQjfCCVDM,39575
 infrahub/core/migrations/graph/m045_backfill_hfid_display_label_in_db_profile_template.py,sha256=mnND7TIdPSVcN9uJeYMmRpVjlAU6QBqfu7e1CS31D9Q,7934
+infrahub/core/migrations/graph/m046_fill_agnostic_hfid_display_labels.py,sha256=labPu3UwH2L6X3VCVmNQHGSbRS-De0ihVzsaUiTf2so,10210
 infrahub/core/migrations/query/__init__.py,sha256=nUbKk8bX6Ei4RkLe0VNNAm7c-d2zDoAMgdFGkYW0Czw,850
 infrahub/core/migrations/query/attribute_add.py,sha256=wKChMnqcd8hb8YCTIU3rUrtVqwHFSI5bdvXAPUcnRIA,4969
 infrahub/core/migrations/query/attribute_remove.py,sha256=IhAPlv9jyZTWMf8f8HZJ8G0ImWebt-ER78NrP3vIWhU,5307
@@ -232,7 +233,7 @@ infrahub/core/migrations/schema/placeholder_dummy.py,sha256=YBGFUVPi57ARbWgAmbtK
 infrahub/core/migrations/schema/tasks.py,sha256=2J8gHGSP-WhxSi4GYhOc9xAJOg_S1ONm3YE4_ukLKxw,4164
 infrahub/core/migrations/shared.py,sha256=sQMdbx1gnht2G_-nb1MDbStwUso5YJkyyumjlKAJq3g,9592
 infrahub/core/models.py,sha256=xoodMSKLaHD5nOtBT35soRQnyHO_OrDlVRNdXuL51Ho,26679
-infrahub/core/node/__init__.py,sha256=yr6WFmh7pvL6XeFx6Hou0UoSc0DniZy09YPn84NGipg,50084
+infrahub/core/node/__init__.py,sha256=OihBPaPhEbgvSLKPdBOBBuh0EFWf2wt897LZ_jdWb_g,50114
 infrahub/core/node/base.py,sha256=BAowVRCK_WC50yXym1kCyUppJDJnrODGU5uoj1s0Yd4,2564
 infrahub/core/node/constraints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/core/node/constraints/attribute_uniqueness.py,sha256=lcHBk4d3bc12sywxRTnQs18VEm_S6pDcUhNnsXnb-uI,2162
@@ -255,7 +256,7 @@ infrahub/core/property.py,sha256=mUf8JGUHSUVBaegN7TPmmJ2CnQRGo2d1rQEQQmdNmbU,538
 infrahub/core/protocols.py,sha256=QoDvLhsvLiJ7Jqouri31ja4apApVN4zmwm_59KTKhs8,12741
 infrahub/core/protocols_base.py,sha256=cEi6giHtEUmaD0JWfDfWHJhEv_6wjaBA3oJRJCbvc6Q,3411
 infrahub/core/query/__init__.py,sha256=2qIMaODLwJ6pK6BUd5vODTlA15Aecf5I8_-J44UlCso,23089
-infrahub/core/query/attribute.py,sha256=rz0IJPD1wUhjzt20z8R-_gMYGJuNyShM_PDzKPLiF1M,14901
+infrahub/core/query/attribute.py,sha256=ud9dySDbH8OYnCMB6KJKZONekE3Ar0IrUbVEQ9DeWYQ,14873
 infrahub/core/query/branch.py,sha256=aIYyDxpnw_Zw2lqTnMEVlhPUaYckZtJJJU1SFUht1o0,4343
 infrahub/core/query/delete.py,sha256=7tPP1qtNV6QGYtmgE1RKsuQ9oxENnMTVkttLvJ2PiKg,1927
 infrahub/core/query/diff.py,sha256=uvojpzJSZFdcuutPAbA74M6R85hN1fteBDS4ZufE8IA,38579
@@ -277,7 +278,7 @@ infrahub/core/relationship/constraints/peer_kind.py,sha256=Bropiav4y6r0iU2KfWJ_k
 infrahub/core/relationship/constraints/peer_parent.py,sha256=z7elpC8xS_ovAF28Haq-RNpFtTEiUehzowiDgYGT68U,2343
 infrahub/core/relationship/constraints/peer_relatives.py,sha256=Ye79l7njaWxZkU2chTOaptIjvKBIawsNCl0IQxCTDtM,2737
 infrahub/core/relationship/constraints/profiles_kind.py,sha256=nEZPGtGcmelZ1Nb8EPcQ-7_zCLCNIYwwWbU6C9fLj5E,2464
-infrahub/core/relationship/model.py,sha256=ZaR6Rrp1CiS8vjbAcMTILpeI755ORj2_aj-3MYD-Q1E,48978
+infrahub/core/relationship/model.py,sha256=ocow8RLJWaR-DNHh7JqYhA0g0fe_elwNeOmuG9ywzII,49093
 infrahub/core/root.py,sha256=8ZLSOtnmjQcrjqX2vxNO-AGopEUArmBPo_X5NeZBdP0,416
 infrahub/core/schema/__init__.py,sha256=U1DWsadfu7Y3FUc51h9DNwXEq4Qa_yZ5w5Bip8pVpmw,6674
 infrahub/core/schema/attribute_parameters.py,sha256=ABL1GEsOl4_CcDvK9_NucGMaF6LUeOjAxbDQVm_G7eg,6516
@@ -442,7 +443,7 @@ infrahub/dependencies/registry.py,sha256=YNv73l66EIYDBLaxeeWfGStl8MY6Xz_HpQY1osX
 infrahub/display_labels/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/display_labels/gather.py,sha256=FVMB6dGh3rwUKvxlqHOYrmxAEOuRkw7AC4SHjtL3w5Y,1786
 infrahub/display_labels/models.py,sha256=eOnbMy70qtw8je2ltYxW9BMSt2UO0T6H8ouHJATW_wE,9779
-infrahub/display_labels/tasks.py,sha256=LADymXZWPtCnrzz93uliNyBEeXSNhvdmZ0jBhXUIo2I,7009
+infrahub/display_labels/tasks.py,sha256=BlXADozQcAdh3Qzhbr6ARjGM3X9rCpEnfjvzCHCe0qA,7934
 infrahub/display_labels/triggers.py,sha256=dWMKAC5w525bz6MnDF8kQFbDsSL-qI8A7zcZRP2Xq6U,966
 infrahub/events/__init__.py,sha256=6BtpkdstvgnMYvUWc-q2dqiA08ZRaU-Xs4vmhWpOJXs,1702
 infrahub/events/artifact_action.py,sha256=-j_Sh-_NdJIGJhUDYm5DoZS--eIYsaMsejj36OUE6yk,2823
@@ -587,7 +588,7 @@ infrahub/helpers.py,sha256=KchbQqgipU4VjfwnDbzCGjnEv-4espw_g63Zw4KAhbo,251
 infrahub/hfid/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/hfid/gather.py,sha256=ruv1NtzAuDO0Q6XsAjNsj0ujeXwqHxYOB1tVobPFf4U,1691
 infrahub/hfid/models.py,sha256=DZ6bX0sIcYB6nEbdDWzEVs6seqa2FU3dTT3sTP1PCXw,9481
-infrahub/hfid/tasks.py,sha256=gJKnQWywOXRcmGcrYb1-KovgA5SahQw9UiTtYfVjvMs,6764
+infrahub/hfid/tasks.py,sha256=MEOZQ9mI23187Ae1Rmzuf9l3qxuAB2d_BV4dOWRWKtM,7670
 infrahub/hfid/triggers.py,sha256=A0-oB1GvvfLJFQ9HsL44vFz-ZVgpsSKOhIkDUiL-5fw,912
 infrahub/lock.py,sha256=tebDPS_LZmTCA7M49LNtKwK_e_evqEcguuYoXnbbel8,10954
 infrahub/locks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -703,7 +704,7 @@ infrahub/task_manager/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3h
 infrahub/task_manager/constants.py,sha256=1t1BZRa8_y89gIDPNHzIbRKo63nHOP37-r5OvtHa56c,559
 infrahub/task_manager/event.py,sha256=AVzfTt_jcsL9XgtMDEnhrb7w0xeZ84wB4ZNK2P3g4JA,14127
 infrahub/task_manager/models.py,sha256=WJxyCJPznSck8sUiGhCmfqN5SpKabLPBuTjiHC7vhNE,8940
-infrahub/task_manager/task.py,sha256=CAc7ZMZtQ1uniSnvtmVWSBb4kMOLJEzDob0xiKUDvTQ,15308
+infrahub/task_manager/task.py,sha256=DV3zlj7bRgH0au3tbD6FC8tyx8OtXyryomFCHXR-o6g,16298
 infrahub/tasks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/tasks/artifact.py,sha256=q1MyQAbT77pD-pm8StHsF_JlTpEQreNc51JHJfnsTD4,1958
 infrahub/tasks/check.py,sha256=37n1U1Knb3AV6kz2sw_IabL9pnlqceLVICWf9GdSxZE,687
@@ -715,7 +716,7 @@ infrahub/telemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
 infrahub/telemetry/constants.py,sha256=_5mJAZaT_wTCaF7Yzsd---Zn1N6GZkoP_954GK8K4-c,184
 infrahub/telemetry/database.py,sha256=9UVPOxRionVF65jjo8slRIaNBOv-KMRzq7I-7fe3wZE,3111
 infrahub/telemetry/models.py,sha256=q3h_wSX0A2OZgDHo05TXTgcHrzDSxx8hSyqRKPGLvwc,1405
-infrahub/telemetry/task_manager.py,sha256=x7bUCQ2jXi93VWmrjKZHZTzR3JhD7r0OhhqK7ymCnAM,2864
+infrahub/telemetry/task_manager.py,sha256=ZTmrN_MTiPIMyFQuD7xBy0PQkMCchNfI9KHnn5oLkkE,2934
 infrahub/telemetry/tasks.py,sha256=O1oK1AJIuf5x2kMxxN2fEDiurH1PmBcYUFCQO_HBvoc,4380
 infrahub/telemetry/utils.py,sha256=K-gmj4QilO3HXAqJRzUwVcpqdA9KcM4RYJPU_zUYpHA,308
 infrahub/trace.py,sha256=Hir9hMWx_6IKF_dhDnMxYjusJdy0ycjB5CHWge2wXNE,3759
@@ -726,8 +727,8 @@ infrahub/transformations/tasks.py,sha256=fxq1t5k0Uj9guEEQN4JSagF4ITVDP7_eyp9TxfQ
 infrahub/trigger/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 infrahub/trigger/catalogue.py,sha256=62WmN3waxVGBLgcGIygcxLwA7HWk4ffbm2WQPgdimiE,976
 infrahub/trigger/constants.py,sha256=u9_5A6gIUIrprzfEdwseYk2yTkwU0VPCjZTwL8b3T6s,22
-infrahub/trigger/models.py,sha256=niFKtFpgUovIllKKImAkyqeBMUUxcqSHBNf6jBGbGzQ,4968
-infrahub/trigger/setup.py,sha256=EZxsneu-da-nXtHtsl5jSicKr8B9uWd-2YCq1J-4fko,6537
+infrahub/trigger/models.py,sha256=X82Kz1K1s2knb8iezx87ctu4ML6d8f0wWYWM6EglUQo,8240
+infrahub/trigger/setup.py,sha256=YCp8smOzXE4jlQ0n7OlK_FMDHPPusHfoRVKREv9bKiI,8018
 infrahub/trigger/tasks.py,sha256=b_tdEKYpWYkDnv3m7cfwH5JanZdMBhro4Lzf0LV8_RI,1871
 infrahub/types.py,sha256=aea-_EXtaqhYq8-P5LGqkU9VitfMs_WU4WXOSsItrsg,11591
 infrahub/utils.py,sha256=3p_bXmRGOTnXIMG9fZ9mFsKdBw2fU3VBFJT3YHrbvyw,2704
@@ -888,8 +889,8 @@ infrahub_testcontainers/models.py,sha256=hT7WEX2o7gxTFPE9uhtP5yigKgP5YSsy2c3tFB-
 infrahub_testcontainers/performance_test.py,sha256=_nf7Uk15mHwqpN4y7XUfI4JI54-UaXW-Yu4uwMIx21w,6185
 infrahub_testcontainers/plugin.py,sha256=I3RuZQ0dARyKHuqCf0y1Yj731P2Mwf3BJUehRJKeWrs,5645
 infrahub_testcontainers/prometheus.yml,sha256=610xQEyj3xuVJMzPkC4m1fRnCrjGpiRBrXA2ytCLa54,599
-infrahub_server-1.5.2.dist-info/LICENSE.txt,sha256=7GQO7kxVoQYnZtFrjZBKLRXbrGwwwimHPPOJtqXsozQ,11340
-infrahub_server-1.5.2.dist-info/METADATA,sha256=FI3l6-MziBYaFBQiDqElU0MK6XgH67S3iQsq6jiJ4_8,6127
-infrahub_server-1.5.2.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-infrahub_server-1.5.2.dist-info/entry_points.txt,sha256=UXIeFWDsrV-4IllNvUEd6KieYGzQfn9paga2YyABOQI,393
-infrahub_server-1.5.2.dist-info/RECORD,,
+infrahub_server-1.5.4.dist-info/LICENSE.txt,sha256=7GQO7kxVoQYnZtFrjZBKLRXbrGwwwimHPPOJtqXsozQ,11340
+infrahub_server-1.5.4.dist-info/METADATA,sha256=nX55O3QHheHyTq0Ntl453Hw2Bizx-R_r657PW5T2O8k,6127
+infrahub_server-1.5.4.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+infrahub_server-1.5.4.dist-info/entry_points.txt,sha256=UXIeFWDsrV-4IllNvUEd6KieYGzQfn9paga2YyABOQI,393
+infrahub_server-1.5.4.dist-info/RECORD,,