infrahub-server 1.2.9rc0__py3-none-any.whl → 1.2.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. infrahub/computed_attribute/models.py +13 -0
  2. infrahub/computed_attribute/tasks.py +48 -26
  3. infrahub/core/attribute.py +43 -2
  4. infrahub/core/branch/models.py +8 -9
  5. infrahub/core/branch/tasks.py +0 -2
  6. infrahub/core/diff/calculator.py +65 -11
  7. infrahub/core/diff/combiner.py +38 -31
  8. infrahub/core/diff/coordinator.py +44 -28
  9. infrahub/core/diff/data_check_synchronizer.py +3 -2
  10. infrahub/core/diff/enricher/hierarchy.py +36 -27
  11. infrahub/core/diff/ipam_diff_parser.py +5 -4
  12. infrahub/core/diff/merger/merger.py +46 -16
  13. infrahub/core/diff/merger/serializer.py +1 -0
  14. infrahub/core/diff/model/field_specifiers_map.py +64 -0
  15. infrahub/core/diff/model/path.py +58 -58
  16. infrahub/core/diff/parent_node_adder.py +14 -16
  17. infrahub/core/diff/query/drop_nodes.py +42 -0
  18. infrahub/core/diff/query/field_specifiers.py +8 -7
  19. infrahub/core/diff/query/filters.py +15 -1
  20. infrahub/core/diff/query/merge.py +264 -28
  21. infrahub/core/diff/query/save.py +6 -2
  22. infrahub/core/diff/query_parser.py +50 -64
  23. infrahub/core/diff/repository/deserializer.py +38 -24
  24. infrahub/core/diff/repository/repository.py +31 -12
  25. infrahub/core/graph/__init__.py +1 -1
  26. infrahub/core/migrations/graph/__init__.py +2 -0
  27. infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
  28. infrahub/core/migrations/graph/m028_delete_diffs.py +38 -0
  29. infrahub/core/query/branch.py +27 -17
  30. infrahub/core/query/diff.py +162 -51
  31. infrahub/core/query/node.py +39 -5
  32. infrahub/core/query/relationship.py +105 -30
  33. infrahub/core/query/subquery.py +2 -2
  34. infrahub/core/relationship/model.py +1 -1
  35. infrahub/core/schema/schema_branch.py +3 -0
  36. infrahub/core/validators/uniqueness/query.py +7 -0
  37. infrahub/graphql/queries/diff/tree.py +2 -1
  38. infrahub/trigger/models.py +11 -1
  39. infrahub/trigger/setup.py +51 -15
  40. infrahub/trigger/tasks.py +1 -4
  41. infrahub/types.py +1 -1
  42. infrahub/webhook/models.py +2 -1
  43. infrahub/workflows/catalogue.py +9 -0
  44. infrahub/workflows/initialization.py +1 -3
  45. infrahub_sdk/timestamp.py +2 -2
  46. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/METADATA +3 -3
  47. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/RECORD +52 -48
  48. infrahub_testcontainers/docker-compose.test.yml +3 -3
  49. infrahub_testcontainers/performance_test.py +6 -3
  50. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/LICENSE.txt +0 -0
  51. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/WHEEL +0 -0
  52. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/entry_points.txt +0 -0
infrahub/core/query/relationship.py CHANGED
@@ -73,9 +73,18 @@ class RelationshipPeerData:
     source_id: UUID
     """UUID of the Source Node."""
 
+    source_db_id: str
+    """Internal DB ID of the Source Node."""
+
+    source_kind: str
+    """Kind of the Source Node."""
+
     peer_id: UUID
     """UUID of the Peer Node."""
 
+    peer_db_id: str
+    """Internal DB ID of the Peer Node."""
+
     peer_kind: str
     """Kind of the Peer Node."""
 
@@ -85,9 +94,6 @@ class RelationshipPeerData:
     rel_node_id: UUID | None = None
     """UUID of the Relationship Node."""
 
-    peer_db_id: str | None = None
-    """Internal DB ID of the Peer Node."""
-
     rel_node_db_id: str | None = None
    """Internal DB ID of the Relationship Node."""
 
@@ -196,6 +202,63 @@ class RelationshipQuery(Query):
             rel_prop_dict["hierarchy"] = self.schema.hierarchical
         return rel_prop_dict
 
+    def add_source_match_to_query(self, source_branch: Branch) -> None:
+        self.params["source_id"] = self.source_id or self.source.get_id()
+        if source_branch.is_global or source_branch.is_default:
+            source_query_match = """
+            MATCH (s:Node { uuid: $source_id })
+            OPTIONAL MATCH (s)-[delete_edge:IS_PART_OF {status: "deleted", branch: $source_branch}]->(:Root)
+            WHERE delete_edge.from <= $at
+            WITH *, s WHERE delete_edge IS NULL
+            """
+            self.params["source_branch"] = source_branch.name
+        else:
+            source_filter, source_filter_params = source_branch.get_query_filter_path(
+                at=self.at, variable_name="r", params_prefix="src_"
+            )
+            source_query_match = """
+            MATCH (s:Node { uuid: $source_id })
+            CALL {
+                WITH s
+                MATCH (s)-[r:IS_PART_OF]->(:Root)
+                WHERE %(source_filter)s
+                RETURN r.status = "active" AS s_is_active
+                ORDER BY r.from DESC
+                LIMIT 1
+            }
+            WITH *, s WHERE s_is_active = TRUE
+            """ % {"source_filter": source_filter}
+            self.params.update(source_filter_params)
+        self.add_to_query(source_query_match)
+
+    def add_dest_match_to_query(self, destination_branch: Branch, destination_id: str) -> None:
+        self.params["destination_id"] = destination_id
+        if destination_branch.is_global or destination_branch.is_default:
+            destination_query_match = """
+            MATCH (d:Node { uuid: $destination_id })
+            OPTIONAL MATCH (d)-[delete_edge:IS_PART_OF {status: "deleted", branch: $destination_branch}]->(:Root)
+            WHERE delete_edge.from <= $at
+            WITH *, d WHERE delete_edge IS NULL
+            """
+            self.params["destination_branch"] = destination_branch.name
+        else:
+            destination_filter, destination_filter_params = destination_branch.get_query_filter_path(
+                at=self.at, variable_name="r", params_prefix="dst_"
+            )
+            destination_query_match = """
+            MATCH (d:Node { uuid: $destination_id })
+            CALL {
+                WITH d
+                MATCH (d)-[r:IS_PART_OF]->(:Root)
+                WHERE %(destination_filter)s
+                RETURN r.status = "active" AS d_is_active
+                ORDER BY r.from DESC
+                LIMIT 1
+            }
+            WITH *, d WHERE d_is_active = TRUE
+            """ % {"destination_filter": destination_filter}
+            self.params.update(destination_filter_params)
+        self.add_to_query(destination_query_match)
+
 
 class RelationshipCreateQuery(RelationshipQuery):
     name = "relationship_create"
@@ -214,8 +277,6 @@ class RelationshipCreateQuery(RelationshipQuery):
         super().__init__(destination=destination, destination_id=destination_id, **kwargs)
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["name"] = self.schema.identifier
         self.params["branch_support"] = self.schema.branch.value
 
@@ -228,12 +289,11 @@
         self.params["is_protected"] = self.rel.is_protected
         self.params["is_visible"] = self.rel.is_visible
 
-        query_match = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
-        """
-        self.add_to_query(query_match)
-
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         self.query_add_all_node_property_match()
 
         self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.ACTIVE)
@@ -378,7 +438,6 @@ class RelationshipDataDeleteQuery(RelationshipQuery):
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
         self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.data.peer_id
         self.params["rel_node_id"] = self.data.rel_node_id
         self.params["name"] = self.schema.identifier
         self.params["branch"] = self.branch.name
@@ -388,9 +447,10 @@
         # -----------------------------------------------------------------------
         # Match all nodes, including properties
         # -----------------------------------------------------------------------
+
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(destination_branch=self.branch, destination_id=self.data.peer_id)
         query = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
         MATCH (rl:Relationship { uuid: $rel_node_id })
         """
         self.add_to_query(query)
@@ -442,8 +502,6 @@ class RelationshipDeleteQuery(RelationshipQuery):
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
         rel_filter, rel_params = self.branch.get_query_filter_path(at=self.at, variable_name="edge")
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["rel_id"] = self.rel.id
         self.params["branch"] = self.branch.name
         self.params["rel_prop"] = self.get_relationship_properties_dict(status=RelationshipStatus.DELETED)
@@ -454,9 +512,14 @@
         r1 = f"{arrows.left.start}[r1:{self.rel_type} $rel_prop ]{arrows.left.end}"
         r2 = f"{arrows.right.start}[r2:{self.rel_type} $rel_prop ]{arrows.right.end}"
 
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         query = """
-        MATCH (s:Node { uuid: $source_id })-[:IS_RELATED]-(rl:Relationship {uuid: $rel_id})-[:IS_RELATED]-(d:Node { uuid: $destination_id })
-        WITH s, rl, d
+        MATCH (s)-[:IS_RELATED]-(rl:Relationship {uuid: $rel_id})-[:IS_RELATED]-(d)
+        WITH DISTINCT s, rl, d
         LIMIT 1
         CREATE (s)%(r1)s(rl)
         CREATE (rl)%(r2)s(d)
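
Note: the `MATCH` now reuses the `s` and `d` aliases bound by the new helper methods, and because the undirected `-[:IS_RELATED]-` pattern can produce the same `(s, rl, d)` triple once per combination of parallel branch edges, `WITH DISTINCT` is needed to make the following `LIMIT 1` deterministic. A toy illustration of the duplication:

```python
# Illustrative sketch (not Infrahub code): an undirected match over two
# parallel IS_RELATED edges per side yields the same logical triple once
# per edge combination; deduplicating first makes LIMIT 1 deterministic.
from itertools import product

edges_s_rl = ["branch-edge", "main-edge"]   # two edges between s and rl
edges_rl_d = ["branch-edge", "main-edge"]   # two edges between rl and d

rows = [("s", "rl", "d") for _ in product(edges_s_rl, edges_rl_d)]
assert len(rows) == 4        # four raw rows for one logical triple
assert len(set(rows)) == 1   # DISTINCT collapses them to one
```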
@@ -752,10 +815,15 @@ class RelationshipGetPeerQuery(Query):
     def get_peers(self) -> Generator[RelationshipPeerData, None, None]:
         for result in self.get_results_group_by(("peer", "uuid"), ("source_node", "uuid")):
             rels = result.get("rels")
+            source_node = result.get_node("source_node")
+            peer_node = result.get_node("peer")
             data = RelationshipPeerData(
-                source_id=result.get_node("source_node").get("uuid"),
-                peer_id=result.get_node("peer").get("uuid"),
-                peer_kind=result.get_node("peer").get("kind"),
+                source_id=source_node.get("uuid"),
+                source_db_id=source_node.element_id,
+                source_kind=source_node.get("kind"),
+                peer_id=peer_node.get("uuid"),
+                peer_db_id=peer_node.element_id,
+                peer_kind=peer_node.get("kind"),
                 rel_node_db_id=result.get("rl").element_id,
                 rel_node_id=result.get("rl").get("uuid"),
                 updated_at=rels[0]["from"],
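
Note: `get_peers` now resolves each endpoint node once and surfaces its internal driver id (`element_id`) and kind, matching the new required fields on `RelationshipPeerData`, so callers can address the exact graph nodes without a second lookup. A stand-in showing the shape of the enriched payload (the `Node` class below is a stub, not the neo4j driver type, and the ids are fabricated examples):

```python
from dataclasses import dataclass, field


@dataclass
class Node:
    element_id: str                    # internal DB id exposed by the driver
    properties: dict = field(default_factory=dict)

    def get(self, key: str):
        return self.properties.get(key)


source_node = Node("4:db:17", {"uuid": "a1", "kind": "TestPerson"})
peer_node = Node("4:db:42", {"uuid": "b2", "kind": "TestCar"})

peer_data = {
    "source_id": source_node.get("uuid"),
    "source_db_id": source_node.element_id,  # new required field
    "source_kind": source_node.get("kind"),  # new required field
    "peer_id": peer_node.get("uuid"),
    "peer_db_id": peer_node.element_id,      # now required, no longer optional
    "peer_kind": peer_node.get("kind"),
}
assert peer_data["source_db_id"] == "4:db:17"
```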
@@ -793,8 +861,6 @@ class RelationshipGetQuery(RelationshipQuery):
     type: QueryType = QueryType.READ
 
     async def query_init(self, db: InfrahubDatabase, **kwargs) -> None:  # noqa: ARG002
-        self.params["source_id"] = self.source_id
-        self.params["destination_id"] = self.destination_id
         self.params["name"] = self.schema.identifier
         self.params["branch"] = self.branch.name
 
@@ -808,9 +874,12 @@
         r1 = f"{arrows.left.start}[r1:{self.rel.rel_type}]{arrows.left.end}"
         r2 = f"{arrows.right.start}[r2:{self.rel.rel_type}]{arrows.right.end}"
 
+        self.add_source_match_to_query(source_branch=self.source.get_branch_based_on_support_type())
+        self.add_dest_match_to_query(
+            destination_branch=self.destination.get_branch_based_on_support_type(),
+            destination_id=self.destination_id or self.destination.get_id(),
+        )
         query = """
-        MATCH (s:Node { uuid: $source_id })
-        MATCH (d:Node { uuid: $destination_id })
         MATCH (s)%s(rl:Relationship { name: $name })%s(d)
         WHERE %s
         """ % (
@@ -1037,7 +1106,11 @@ class RelationshipDeleteAllQuery(Query):
         CALL {
             WITH rl
             MATCH (rl)-[active_edge:IS_RELATED]->(n)
-            WHERE %(active_rel_filter)s AND active_edge.status ="active"
+            WHERE %(active_rel_filter)s
+            WITH rl, active_edge, n
+            ORDER BY %(id_func)s(rl), %(id_func)s(n), active_edge.from DESC
+            WITH rl, n, head(collect(active_edge)) AS active_edge
+            WHERE active_edge.status = "active"
             CREATE (rl)-[deleted_edge:IS_RELATED $rel_prop]->(n)
             SET deleted_edge.hierarchy = active_edge.hierarchy
             WITH rl, active_edge, n
@@ -1053,7 +1126,11 @@
 
             WITH rl
             MATCH (rl)<-[active_edge:IS_RELATED]-(n)
-            WHERE %(active_rel_filter)s AND active_edge.status ="active"
+            WHERE %(active_rel_filter)s
+            WITH rl, active_edge, n
+            ORDER BY %(id_func)s(rl), %(id_func)s(n), active_edge.from DESC
+            WITH rl, n, head(collect(active_edge)) AS active_edge
+            WHERE active_edge.status = "active"
             CREATE (rl)<-[deleted_edge:IS_RELATED $rel_prop]-(n)
             SET deleted_edge.hierarchy = active_edge.hierarchy
             WITH rl, active_edge, n
@@ -1066,9 +1143,7 @@
             "inbound" as rel_direction
         }
         RETURN DISTINCT uuid, kind, rel_identifier, rel_direction
-        """ % {
-            "active_rel_filter": active_rel_filter,
-        }
+        """ % {"active_rel_filter": active_rel_filter, "id_func": db.get_id_function_name()}
 
        self.add_to_query(query)
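
Note: the delete-all query previously acted on any `IS_RELATED` edge with `status = "active"`; it now sorts the edges per `(rl, n)` pair newest-first, keeps only the head of each group, and writes the deleted edge only when that latest edge is still active, so an edge already superseded by a deleted edge is not resurrected. The same rule in plain Python (illustrative names only):

```python
# Plain-Python sketch of the "newest edge per (rl, n) pair wins" rule the
# Cypher now expresses with ORDER BY ... DESC + head(collect(...)).
edges = [
    # (rl_id, node_id, from_time, status)
    ("rl1", "n1", 1.0, "active"),
    ("rl1", "n1", 5.0, "deleted"),   # newer edge supersedes the active one
    ("rl1", "n2", 2.0, "active"),
]

latest: dict[tuple[str, str], tuple] = {}
for rl, n, frm, status in edges:
    key = (rl, n)
    if key not in latest or frm > latest[key][0]:
        latest[key] = (frm, status)

to_delete = [key for key, (_, status) in latest.items() if status == "active"]
assert to_delete == [("rl1", "n2")]  # (rl1, n1) is already deleted
```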
infrahub/core/query/subquery.py CHANGED
@@ -57,7 +57,7 @@ async def build_subquery_filter(
     params.update(field_params)
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
@@ -138,7 +138,7 @@ async def build_subquery_order(
     field_filter[-1].name = "last"
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
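
Note: both subquery builders now restate the `:Node` label and a self-referential `uuid` predicate on the path anchor. Reading the diff, this looks like a query-planner hint that lets Neo4j re-anchor the subquery on the node index instead of expanding blindly; that interpretation is ours, not a documented statement. The rendered pattern changes as follows (a sketch, not Infrahub output):

```python
# Rendered path anchor before and after the change, for a node alias "n".
node_alias = "n"

before = f"({node_alias})"
after = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})"

print(before)  # (n)
print(after)   # (n:Node {uuid: n.uuid})
```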
infrahub/core/relationship/model.py CHANGED
@@ -416,7 +416,7 @@ class Relationship(FlagPropertyMixin, NodePropertyMixin):
         await update_relationships_to(rel_ids_to_update, to=delete_at, db=db)
 
         delete_query = await RelationshipDeleteQuery.init(
-            db=db, rel=self, source_id=node.id, destination_id=peer.id, branch=branch, at=delete_at
+            db=db, rel=self, source=node, destination=peer, branch=branch, at=delete_at
         )
         await delete_query.execute(db=db)
infrahub/core/schema/schema_branch.py CHANGED
@@ -2050,6 +2050,9 @@ class SchemaBranch:
 
             identified.add(node_schema)
 
+            if node_schema.is_node_schema:
+                identified.update([self.get(name=kind, duplicate=False) for kind in node_schema.inherit_from])
+
             for relationship in node_schema.relationships:
                 if (
                     relationship.peer in [InfrahubKind.GENERICGROUP, InfrahubKind.PROFILE]
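
Note: when collecting schemas, a node schema now also pulls every generic in its `inherit_from` list into the identified set. A toy version of the expansion (the kind names below are hypothetical):

```python
# Flagging a node kind now also flags the generics it inherits from.
schema_registry = {
    "TestDevice": {"inherit_from": ["TestGenericDevice", "CoreArtifactTarget"]},
    "TestGenericDevice": {"inherit_from": []},
    "CoreArtifactTarget": {"inherit_from": []},
}

identified: set[str] = set()
kind = "TestDevice"
identified.add(kind)
identified.update(schema_registry[kind]["inherit_from"])  # new behaviour

assert identified == {"TestDevice", "TestGenericDevice", "CoreArtifactTarget"}
```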
infrahub/core/validators/uniqueness/query.py CHANGED
@@ -225,6 +225,13 @@ class NodeUniqueAttributeConstraintQuery(Query):
             attr_name,
             attr_value,
             relationship_identifier
+        ORDER BY
+            node_id,
+            deepest_branch_name,
+            node_count,
+            attr_name,
+            attr_value,
+            relationship_identifier
         """ % {
             "select_subqueries_str": select_subqueries_str,
             "branch_filter": branch_filter,
infrahub/graphql/queries/diff/tree.py CHANGED
@@ -10,6 +10,7 @@ from infrahub.core import registry
 from infrahub.core.constants import DiffAction, RelationshipCardinality
 from infrahub.core.constants.database import DatabaseEdgeType
 from infrahub.core.diff.model.path import NameTrackingId
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters
 from infrahub.core.diff.repository.repository import DiffRepository
 from infrahub.core.query.diff import DiffCountChanges
 from infrahub.core.timestamp import Timestamp
@@ -415,7 +416,7 @@ class DiffTreeResolver:
             diff_branch_names=[diff_branch.name],
             from_time=from_timestamp,
             to_time=to_timestamp,
-            filters=filters_dict,
+            filters=EnrichedDiffQueryFilters(**filters_dict),
             include_parents=include_parents,
             limit=limit,
             offset=offset,
infrahub/trigger/models.py CHANGED
@@ -5,8 +5,11 @@ from enum import Enum
 from typing import TYPE_CHECKING, Any
 
 from prefect.events.actions import RunDeployment
+from prefect.events.schemas.automations import (
+    Automation,  # noqa: TC002
+    Posture,
+)
 from prefect.events.schemas.automations import EventTrigger as PrefectEventTrigger
-from prefect.events.schemas.automations import Posture
 from prefect.events.schemas.events import ResourceSpecification
 from pydantic import BaseModel, Field
@@ -19,6 +22,13 @@ if TYPE_CHECKING:
     from uuid import UUID
 
 
+class TriggerSetupReport(BaseModel):
+    created: list[TriggerDefinition] = Field(default_factory=list)
+    updated: list[TriggerDefinition] = Field(default_factory=list)
+    deleted: list[Automation] = Field(default_factory=list)
+    unchanged: list[TriggerDefinition] = Field(default_factory=list)
+
+
 class TriggerType(str, Enum):
     BUILTIN = "builtin"
     WEBHOOK = "webhook"
infrahub/trigger/setup.py CHANGED
@@ -5,27 +5,43 @@ from prefect.automations import AutomationCore
 from prefect.cache_policies import NONE
 from prefect.client.orchestration import PrefectClient
 from prefect.client.schemas.filters import DeploymentFilter, DeploymentFilterName
+from prefect.events.schemas.automations import Automation
 
 from infrahub.trigger.models import TriggerDefinition
 
-from .models import TriggerType
+from .models import TriggerSetupReport, TriggerType
 
 if TYPE_CHECKING:
     from uuid import UUID
 
 
+def compare_automations(target: AutomationCore, existing: Automation) -> bool:
+    """Compare an AutomationCore with an existing Automation object to identify if they are identical or not
+
+    Return True if the target is identical to the existing automation
+    """
+
+    target_dump = target.model_dump(exclude_defaults=True, exclude_none=True)
+    existing_dump = existing.model_dump(exclude_defaults=True, exclude_none=True, exclude={"id"})
+
+    return target_dump == existing_dump
+
+
 @task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)  # type: ignore[arg-type]
 async def setup_triggers(
     client: PrefectClient,
     triggers: list[TriggerDefinition],
     trigger_type: TriggerType | None = None,
-) -> None:
+    force_update: bool = False,
+) -> TriggerSetupReport:
     log = get_run_logger()
 
+    report = TriggerSetupReport()
+
     if trigger_type:
-        log.info(f"Setting up triggers of type {trigger_type.value}")
+        log.debug(f"Setting up triggers of type {trigger_type.value}")
     else:
-        log.info("Setting up all triggers")
+        log.debug("Setting up all triggers")
 
     # -------------------------------------------------------------
     # Retrieve existing Deployments and Automation from the server
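
Note: `compare_automations` is the heart of this change: by dumping both objects with `model_dump(exclude_defaults=True, exclude_none=True)` and dropping the server-assigned `id`, a redeploy that would write identical automation config can be detected and skipped. A self-contained illustration using plain pydantic stand-ins for `AutomationCore`/`Automation`:

```python
# Stand-in models; the real types come from prefect.automations and
# prefect.events.schemas.automations.
from pydantic import BaseModel


class AutomationCoreStub(BaseModel):
    name: str
    enabled: bool = True


class AutomationStub(AutomationCoreStub):
    id: str  # server-assigned, excluded from the comparison


def compare(target: AutomationCoreStub, existing: AutomationStub) -> bool:
    target_dump = target.model_dump(exclude_defaults=True, exclude_none=True)
    existing_dump = existing.model_dump(exclude_defaults=True, exclude_none=True, exclude={"id"})
    return target_dump == existing_dump


assert compare(AutomationCoreStub(name="t1"), AutomationStub(name="t1", id="abc"))
assert not compare(AutomationCoreStub(name="t2"), AutomationStub(name="t1", id="abc"))
```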
@@ -38,23 +54,24 @@
         )
     }
     deployments_mapping: dict[str, UUID] = {name: item.id for name, item in deployments.items()}
-    existing_automations = {item.name: item for item in await client.read_automations()}
 
     # If a trigger type is provided, narrow down the list of existing triggers to know which one to delete
+    existing_automations: dict[str, Automation] = {}
     if trigger_type:
-        trigger_automations = [
-            item.name for item in await client.read_automations() if item.name.startswith(trigger_type.value)
-        ]
+        existing_automations = {
+            item.name: item for item in await client.read_automations() if item.name.startswith(trigger_type.value)
+        }
     else:
-        trigger_automations = [item.name for item in await client.read_automations()]
+        existing_automations = {item.name: item for item in await client.read_automations()}
 
     trigger_names = [trigger.generate_name() for trigger in triggers]
+    automation_names = list(existing_automations.keys())
 
-    log.debug(f"{len(trigger_automations)} existing triggers ({trigger_automations})")
-    log.debug(f"{len(trigger_names)} triggers to configure ({trigger_names})")
+    log.debug(f"{len(automation_names)} existing triggers ({automation_names})")
+    log.debug(f"{len(trigger_names)} triggers to configure ({trigger_names})")
 
-    to_delete = set(trigger_automations) - set(trigger_names)
-    log.debug(f"{len(trigger_names)} triggers to delete ({to_delete})")
+    to_delete = set(automation_names) - set(trigger_names)
+    log.debug(f"{len(to_delete)} triggers to delete ({to_delete})")
 
     # -------------------------------------------------------------
     # Create or Update all triggers
@@ -71,11 +88,16 @@
         existing_automation = existing_automations.get(trigger.generate_name(), None)
 
         if existing_automation:
-            await client.update_automation(automation_id=existing_automation.id, automation=automation)
-            log.info(f"{trigger.generate_name()} Updated")
+            if force_update or not compare_automations(target=automation, existing=existing_automation):
+                await client.update_automation(automation_id=existing_automation.id, automation=automation)
+                log.info(f"{trigger.generate_name()} Updated")
+                report.updated.append(trigger)
+            else:
+                report.unchanged.append(trigger)
         else:
             await client.create_automation(automation=automation)
             log.info(f"{trigger.generate_name()} Created")
+            report.created.append(trigger)
 
     # -------------------------------------------------------------
     # Delete Triggers that shouldn't be there
@@ -86,5 +108,19 @@
         if not existing_automation:
             continue
 
+        report.deleted.append(existing_automation)
         await client.delete_automation(automation_id=existing_automation.id)
         log.info(f"{item_to_delete} Deleted")
+
+    if trigger_type:
+        log.info(
+            f"Processed triggers of type {trigger_type.value}: "
+            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
+        )
+    else:
+        log.info(
+            f"Processed all triggers: "
+            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
+        )
+
+    return report
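
Note: `setup_triggers` now returns a `TriggerSetupReport` instead of `None`, and only calls `update_automation` when the automation actually changed (or when `force_update=True` is passed, as the callers below do). A hypothetical consumer of the report (a calling-pattern sketch only; a real run needs a reachable Prefect API):

```python
from prefect.client.orchestration import get_client

from infrahub.trigger.models import TriggerDefinition
from infrahub.trigger.setup import setup_triggers


async def reconcile(triggers: list[TriggerDefinition]) -> str:
    async with get_client(sync_client=False) as client:
        report = await setup_triggers(client=client, triggers=triggers)
    # With compare_automations in place, a steady-state run should report
    # everything as unchanged instead of rewriting every automation.
    return (
        f"{len(report.created)} created, {len(report.updated)} updated, "
        f"{len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
    )
```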
infrahub/trigger/tasks.py CHANGED
@@ -31,7 +31,4 @@ async def trigger_configure_all(service: InfrahubServices) -> None:
     )
 
     async with get_client(sync_client=False) as prefect_client:
-        await setup_triggers(
-            client=prefect_client,
-            triggers=triggers,
-        )
+        await setup_triggers(client=prefect_client, triggers=triggers, force_update=True)
infrahub/types.py CHANGED
@@ -366,7 +366,7 @@ ATTRIBUTE_PYTHON_TYPES: dict[str, type] = {
 ATTRIBUTE_KIND_LABELS = list(ATTRIBUTE_TYPES.keys())
 
 # Data types supporting large values, which can therefore not be indexed in neo4j.
-LARGE_ATTRIBUTE_TYPES = [TextArea, JSON]
+LARGE_ATTRIBUTE_TYPES = [TextArea, JSON, List]
 
 
 def get_attribute_type(kind: str = "Default") -> type[InfrahubDataType]:
infrahub/webhook/models.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import base64
 import hashlib
 import hmac
+import json
 from typing import TYPE_CHECKING, Any
 from uuid import UUID, uuid4
 
@@ -170,7 +171,7 @@ class StandardWebhook(Webhook):
     def _assign_headers(self, uuid: UUID | None = None, at: Timestamp | None = None) -> None:
         message_id = f"msg_{uuid.hex}" if uuid else f"msg_{uuid4().hex}"
         timestamp = str(at.to_timestamp()) if at else str(Timestamp().to_timestamp())
-        payload = self._payload or {}
+        payload = json.dumps(self._payload or {})
         unsigned_data = f"{message_id}.{timestamp}.{payload}".encode()
         signature = self._sign(data=unsigned_data)
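
Note: this is a signature-correctness fix. Previously the payload dict was interpolated into the signed string via its Python repr, which can never match the JSON body a receiver verifies against; serializing with `json.dumps` makes the HMAC cover the actual wire encoding. A minimal sketch of a standard-webhooks-style signing scheme over the `msg_id.timestamp.payload` input (demo values only; not the Infrahub `_sign` implementation):

```python
import base64
import hashlib
import hmac
import json

secret = b"whsec-demo-key"   # demo secret, not a real key
message_id = "msg_123"
timestamp = "1700000000"
payload = json.dumps({"event": "node.created"})

unsigned = f"{message_id}.{timestamp}.{payload}".encode()
signature = base64.b64encode(hmac.new(secret, unsigned, hashlib.sha256).digest())

# str({...}) would produce {'event': 'node.created'}, which differs from the
# JSON body the receiver sees and therefore breaks verification.
assert json.dumps({"event": "node.created"}) != str({"event": "node.created"})
```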
infrahub/workflows/catalogue.py CHANGED
@@ -251,6 +251,14 @@ COMPUTED_ATTRIBUTE_PROCESS_JINJA2 = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE = WorkflowDefinition(
+    name="computed-attribute-jinja2-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.computed_attribute.tasks",
+    function="computed_attribute_jinja2_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES = WorkflowDefinition(
     name="trigger_update_jinja2_computed_attributes",
     type=WorkflowType.CORE,
@@ -443,6 +451,7 @@ workflows = [
     BRANCH_MERGE_POST_PROCESS,
     BRANCH_REBASE,
     BRANCH_VALIDATE,
+    COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE,
     COMPUTED_ATTRIBUTE_PROCESS_JINJA2,
     COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM,
     COMPUTED_ATTRIBUTE_SETUP_JINJA2,
infrahub/workflows/initialization.py CHANGED
@@ -71,7 +71,5 @@ async def setup_task_manager() -> None:
     await setup_worker_pools(client=client)
     await setup_deployments(client=client)
     await setup_triggers(
-        client=client,
-        triggers=builtin_triggers,
-        trigger_type=TriggerType.BUILTIN,
+        client=client, triggers=builtin_triggers, trigger_type=TriggerType.BUILTIN, force_update=True
     )
infrahub_sdk/timestamp.py CHANGED
@@ -153,7 +153,7 @@ class Timestamp:
         nanoseconds: int = 0,
         disambiguate: Literal["compatible"] = "compatible",
     ) -> Timestamp:
-        return Timestamp(
+        return self.__class__(
             self._obj.add(
                 years=years,
                 months=months,
@@ -183,7 +183,7 @@
         nanoseconds: int = 0,
         disambiguate: Literal["compatible"] = "compatible",
     ) -> Timestamp:
-        return Timestamp(
+        return self.__class__(
             self._obj.subtract(
                 years=years,
                 months=months,
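
Note: returning `self.__class__(...)` instead of the hard-coded `Timestamp(...)` preserves subclass types through `add`/`subtract`. A toy demonstration (not the infrahub_sdk implementation):

```python
# A base class that hard-codes its own name would lose the subclass type
# on arithmetic; self.__class__ preserves it.
class Stamp:
    def __init__(self, value: int) -> None:
        self.value = value

    def add(self, amount: int) -> "Stamp":
        return self.__class__(self.value + amount)  # subclass-aware


class AuditStamp(Stamp):
    pass


assert type(AuditStamp(1).add(2)) is AuditStamp
```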
{infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.2.10.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: infrahub-server
-Version: 1.2.9rc0
+Version: 1.2.10
 Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
 Home-page: https://opsmill.com
 License: AGPL-3.0-only
@@ -77,7 +77,7 @@ Description-Content-Type: text/markdown
 
 Infrahub from [OpsMill](https://opsmill.com) is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run. Infrahub offers a central hub to manage the data, templates and playbooks that powers your infrastructure by combining the version control and branch management capabilities similar to Git with the flexible data model and UI of a graph database.
 
-If you just want to try Infrahub out, you can use our [Always-On Sandbox](https://demo.infrahub.app/) to get started.
+If you just want to try Infrahub out, you can use our [Infrahub Sandbox](https://sandbox.infrahub.app/) to get started.
 
 ![infrahub screenshot](docs/docs/media/infrahub-readme.gif)
 
@@ -103,7 +103,7 @@
 
 ## Quick Start
 
-[Always-On Sandbox](https://demo.infrahub.app/) - Instantly login to the UI of a demo environment of Infrahub with sample data pre-loaded.
+[Infrahub Sandbox](https://sandbox.infrahub.app/) - Instantly login to the UI of a demo environment of Infrahub with sample data pre-loaded.
 
 [Getting Started Environment & Tutorial](https://opsmill.instruqt.com/pages/labs) - It spins up an instance of Infrahub on our cloud, provides a browser, terminal, code editor and walks you through the basic concepts: