infrahub-server 1.2.9rc0__py3-none-any.whl → 1.3.0a0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (166)
  1. infrahub/actions/constants.py +86 -0
  2. infrahub/actions/gather.py +114 -0
  3. infrahub/actions/models.py +241 -0
  4. infrahub/actions/parsers.py +104 -0
  5. infrahub/actions/schema.py +382 -0
  6. infrahub/actions/tasks.py +126 -0
  7. infrahub/actions/triggers.py +21 -0
  8. infrahub/cli/db.py +1 -2
  9. infrahub/computed_attribute/models.py +13 -0
  10. infrahub/computed_attribute/tasks.py +48 -26
  11. infrahub/config.py +9 -0
  12. infrahub/core/account.py +24 -47
  13. infrahub/core/attribute.py +53 -14
  14. infrahub/core/branch/models.py +8 -9
  15. infrahub/core/branch/tasks.py +0 -2
  16. infrahub/core/constants/infrahubkind.py +8 -0
  17. infrahub/core/constraint/node/runner.py +1 -1
  18. infrahub/core/convert_object_type/__init__.py +0 -0
  19. infrahub/core/convert_object_type/conversion.py +122 -0
  20. infrahub/core/convert_object_type/schema_mapping.py +56 -0
  21. infrahub/core/diff/calculator.py +65 -11
  22. infrahub/core/diff/combiner.py +38 -31
  23. infrahub/core/diff/coordinator.py +44 -28
  24. infrahub/core/diff/data_check_synchronizer.py +3 -2
  25. infrahub/core/diff/enricher/hierarchy.py +36 -27
  26. infrahub/core/diff/ipam_diff_parser.py +5 -4
  27. infrahub/core/diff/merger/merger.py +46 -16
  28. infrahub/core/diff/merger/serializer.py +1 -0
  29. infrahub/core/diff/model/field_specifiers_map.py +64 -0
  30. infrahub/core/diff/model/path.py +58 -58
  31. infrahub/core/diff/parent_node_adder.py +14 -16
  32. infrahub/core/diff/query/all_conflicts.py +1 -5
  33. infrahub/core/diff/query/artifact.py +10 -20
  34. infrahub/core/diff/query/diff_get.py +3 -6
  35. infrahub/core/diff/query/drop_nodes.py +42 -0
  36. infrahub/core/diff/query/field_specifiers.py +8 -7
  37. infrahub/core/diff/query/field_summary.py +2 -4
  38. infrahub/core/diff/query/filters.py +15 -1
  39. infrahub/core/diff/query/merge.py +284 -101
  40. infrahub/core/diff/query/save.py +26 -34
  41. infrahub/core/diff/query/summary_counts_enricher.py +34 -54
  42. infrahub/core/diff/query_parser.py +55 -65
  43. infrahub/core/diff/repository/deserializer.py +38 -24
  44. infrahub/core/diff/repository/repository.py +31 -12
  45. infrahub/core/diff/tasks.py +3 -3
  46. infrahub/core/graph/__init__.py +1 -1
  47. infrahub/core/manager.py +14 -11
  48. infrahub/core/migrations/graph/__init__.py +2 -0
  49. infrahub/core/migrations/graph/m003_relationship_parent_optional.py +1 -2
  50. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +2 -4
  51. infrahub/core/migrations/graph/m019_restore_rels_to_time.py +11 -22
  52. infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -6
  53. infrahub/core/migrations/graph/m021_missing_hierarchy_merge.py +1 -2
  54. infrahub/core/migrations/graph/m024_missing_hierarchy_backfill.py +1 -2
  55. infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
  56. infrahub/core/migrations/graph/m028_delete_diffs.py +38 -0
  57. infrahub/core/migrations/query/attribute_add.py +1 -2
  58. infrahub/core/migrations/query/attribute_rename.py +3 -6
  59. infrahub/core/migrations/query/delete_element_in_schema.py +3 -6
  60. infrahub/core/migrations/query/node_duplicate.py +3 -6
  61. infrahub/core/migrations/query/relationship_duplicate.py +3 -6
  62. infrahub/core/migrations/schema/node_attribute_remove.py +3 -6
  63. infrahub/core/migrations/schema/node_remove.py +3 -6
  64. infrahub/core/models.py +29 -2
  65. infrahub/core/node/__init__.py +18 -4
  66. infrahub/core/node/create.py +211 -0
  67. infrahub/core/protocols.py +51 -0
  68. infrahub/core/protocols_base.py +3 -0
  69. infrahub/core/query/__init__.py +2 -2
  70. infrahub/core/query/branch.py +27 -17
  71. infrahub/core/query/diff.py +186 -81
  72. infrahub/core/query/ipam.py +10 -20
  73. infrahub/core/query/node.py +65 -49
  74. infrahub/core/query/relationship.py +156 -58
  75. infrahub/core/query/resource_manager.py +1 -2
  76. infrahub/core/query/subquery.py +4 -6
  77. infrahub/core/relationship/model.py +4 -1
  78. infrahub/core/schema/__init__.py +2 -1
  79. infrahub/core/schema/attribute_parameters.py +36 -0
  80. infrahub/core/schema/attribute_schema.py +83 -8
  81. infrahub/core/schema/basenode_schema.py +25 -1
  82. infrahub/core/schema/definitions/core/__init__.py +21 -0
  83. infrahub/core/schema/definitions/internal.py +13 -3
  84. infrahub/core/schema/generated/attribute_schema.py +9 -3
  85. infrahub/core/schema/schema_branch.py +15 -7
  86. infrahub/core/validators/__init__.py +5 -1
  87. infrahub/core/validators/attribute/choices.py +1 -2
  88. infrahub/core/validators/attribute/enum.py +1 -2
  89. infrahub/core/validators/attribute/kind.py +1 -2
  90. infrahub/core/validators/attribute/length.py +13 -6
  91. infrahub/core/validators/attribute/optional.py +1 -2
  92. infrahub/core/validators/attribute/regex.py +5 -5
  93. infrahub/core/validators/attribute/unique.py +1 -3
  94. infrahub/core/validators/determiner.py +18 -2
  95. infrahub/core/validators/enum.py +7 -0
  96. infrahub/core/validators/node/hierarchy.py +3 -6
  97. infrahub/core/validators/query.py +1 -3
  98. infrahub/core/validators/relationship/count.py +6 -12
  99. infrahub/core/validators/relationship/optional.py +2 -4
  100. infrahub/core/validators/relationship/peer.py +3 -8
  101. infrahub/core/validators/tasks.py +1 -1
  102. infrahub/core/validators/uniqueness/query.py +12 -9
  103. infrahub/database/__init__.py +1 -3
  104. infrahub/events/group_action.py +1 -0
  105. infrahub/graphql/analyzer.py +139 -18
  106. infrahub/graphql/app.py +1 -1
  107. infrahub/graphql/loaders/node.py +1 -1
  108. infrahub/graphql/loaders/peers.py +1 -1
  109. infrahub/graphql/manager.py +4 -0
  110. infrahub/graphql/mutations/action.py +164 -0
  111. infrahub/graphql/mutations/convert_object_type.py +62 -0
  112. infrahub/graphql/mutations/main.py +24 -175
  113. infrahub/graphql/mutations/proposed_change.py +21 -18
  114. infrahub/graphql/queries/convert_object_type_mapping.py +36 -0
  115. infrahub/graphql/queries/diff/tree.py +2 -1
  116. infrahub/graphql/queries/relationship.py +1 -1
  117. infrahub/graphql/resolvers/many_relationship.py +4 -4
  118. infrahub/graphql/resolvers/resolver.py +4 -4
  119. infrahub/graphql/resolvers/single_relationship.py +2 -2
  120. infrahub/graphql/schema.py +6 -0
  121. infrahub/graphql/subscription/graphql_query.py +2 -2
  122. infrahub/graphql/types/branch.py +1 -1
  123. infrahub/menu/menu.py +31 -0
  124. infrahub/message_bus/messages/__init__.py +0 -10
  125. infrahub/message_bus/operations/__init__.py +0 -8
  126. infrahub/message_bus/operations/refresh/registry.py +1 -1
  127. infrahub/patch/queries/consolidate_duplicated_nodes.py +3 -6
  128. infrahub/patch/queries/delete_duplicated_edges.py +5 -10
  129. infrahub/prefect_server/models.py +1 -19
  130. infrahub/proposed_change/models.py +68 -3
  131. infrahub/proposed_change/tasks.py +907 -30
  132. infrahub/task_manager/models.py +10 -6
  133. infrahub/telemetry/database.py +1 -1
  134. infrahub/telemetry/tasks.py +1 -1
  135. infrahub/trigger/catalogue.py +2 -0
  136. infrahub/trigger/models.py +29 -3
  137. infrahub/trigger/setup.py +51 -15
  138. infrahub/trigger/tasks.py +4 -5
  139. infrahub/types.py +1 -1
  140. infrahub/webhook/models.py +2 -1
  141. infrahub/workflows/catalogue.py +85 -0
  142. infrahub/workflows/initialization.py +1 -3
  143. infrahub_sdk/timestamp.py +2 -2
  144. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.3.0a0.dist-info}/METADATA +4 -4
  145. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.3.0a0.dist-info}/RECORD +153 -146
  146. infrahub_testcontainers/container.py +0 -1
  147. infrahub_testcontainers/docker-compose.test.yml +4 -4
  148. infrahub_testcontainers/helpers.py +8 -2
  149. infrahub_testcontainers/performance_test.py +6 -3
  150. infrahub/message_bus/messages/check_generator_run.py +0 -26
  151. infrahub/message_bus/messages/finalize_validator_execution.py +0 -15
  152. infrahub/message_bus/messages/proposed_change/base_with_diff.py +0 -16
  153. infrahub/message_bus/messages/proposed_change/request_proposedchange_refreshartifacts.py +0 -11
  154. infrahub/message_bus/messages/request_generatordefinition_check.py +0 -20
  155. infrahub/message_bus/messages/request_proposedchange_pipeline.py +0 -23
  156. infrahub/message_bus/operations/check/__init__.py +0 -3
  157. infrahub/message_bus/operations/check/generator.py +0 -156
  158. infrahub/message_bus/operations/finalize/__init__.py +0 -3
  159. infrahub/message_bus/operations/finalize/validator.py +0 -133
  160. infrahub/message_bus/operations/requests/__init__.py +0 -9
  161. infrahub/message_bus/operations/requests/generator_definition.py +0 -140
  162. infrahub/message_bus/operations/requests/proposed_change.py +0 -629
  163. /infrahub/{message_bus/messages/proposed_change → actions}/__init__.py +0 -0
  164. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.3.0a0.dist-info}/LICENSE.txt +0 -0
  165. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.3.0a0.dist-info}/WHEEL +0 -0
  166. {infrahub_server-1.2.9rc0.dist-info → infrahub_server-1.3.0a0.dist-info}/entry_points.txt +0 -0
@@ -84,11 +84,15 @@ class FlowProgress(BaseModel):
 
 
 class InfrahubEventFilter(EventFilter):
-    matching_related: list[EventRelatedFilter] = Field(default_factory=list)
+    def add_related_filter(self, related: EventRelatedFilter) -> None:
+        if not isinstance(self.related, list):
+            self.related = []
+
+        self.related.append(related)
 
     def add_account_filter(self, account__ids: list[str] | None) -> None:
         if account__ids:
-            self.matching_related.append(
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.account", "infrahub.resource.id": account__ids}
@@ -98,7 +102,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_branch_filter(self, branches: list[str] | None = None) -> None:
         if branches:
-            self.matching_related.append(
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.branch", "prefect.resource.label": branches}
@@ -116,7 +120,7 @@ class InfrahubEventFilter(EventFilter):
 
         if event_filter:
             event_filter["prefect.resource.role"] = "infrahub.event"
-            self.matching_related.append(EventRelatedFilter(labels=ResourceSpecification(event_filter)))
+            self.add_related_filter(EventRelatedFilter(labels=ResourceSpecification(event_filter)))
 
     def add_event_id_filter(self, ids: list[str] | None = None) -> None:
         if ids:
@@ -151,7 +155,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_parent_filter(self, parent__ids: list[str] | None) -> None:
         if parent__ids:
-            self.matching_related.append(
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.child_event", "infrahub.event_parent.id": parent__ids}
@@ -161,7 +165,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_related_node_filter(self, related_node__ids: list[str] | None) -> None:
         if related_node__ids:
-            self.matching_related.append(
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.related.node", "prefect.resource.id": related_node__ids}
@@ -49,7 +49,7 @@ async def get_system_info(db: InfrahubDatabase) -> TelemetryDatabaseSystemInfoDa
 
 @task(name="telemetry-gather-db", task_run_name="Gather Database Information", cache_policy=NONE)
 async def gather_database_information(db: InfrahubDatabase) -> TelemetryDatabaseData:
-    async with db.start_session() as dbs:
+    async with db.start_session(read_only=True) as dbs:
         server_info = []
         system_info = None
         database_type = db.db_type.value
@@ -38,7 +38,7 @@ async def gather_schema_information(branch: Branch) -> TelemetrySchemaData:
 
 @task(name="telemetry-feature-information", task_run_name="Gather Feature Information", cache_policy=NONE)
 async def gather_feature_information(service: InfrahubServices) -> dict[str, int]:
-    async with service.database.start_session() as db:
+    async with service.database.start_session(read_only=True) as db:
         data = {}
         features_to_count = [
             InfrahubKind.ARTIFACT,
@@ -1,3 +1,4 @@
+from infrahub.actions.triggers import TRIGGER_ACTION_RULE_UPDATE
 from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
@@ -6,6 +7,7 @@ from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
 
 builtin_triggers: list[TriggerDefinition] = [
+    TRIGGER_ACTION_RULE_UPDATE,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
     TRIGGER_WEBHOOK_DELETE,
@@ -5,8 +5,11 @@ from enum import Enum
 from typing import TYPE_CHECKING, Any
 
 from prefect.events.actions import RunDeployment
+from prefect.events.schemas.automations import (
+    Automation,  # noqa: TC002
+    Posture,
+)
 from prefect.events.schemas.automations import EventTrigger as PrefectEventTrigger
-from prefect.events.schemas.automations import Posture
 from prefect.events.schemas.events import ResourceSpecification
 from pydantic import BaseModel, Field
 
@@ -19,7 +22,15 @@ if TYPE_CHECKING:
     from uuid import UUID
 
 
+class TriggerSetupReport(BaseModel):
+    created: list[TriggerDefinition] = Field(default_factory=list)
+    updated: list[TriggerDefinition] = Field(default_factory=list)
+    deleted: list[Automation] = Field(default_factory=list)
+    unchanged: list[TriggerDefinition] = Field(default_factory=list)
+
+
 class TriggerType(str, Enum):
+    ACTION_TRIGGER_RULE = "action_trigger_rule"
     BUILTIN = "builtin"
     WEBHOOK = "webhook"
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
@@ -28,10 +39,15 @@ class TriggerType(str, Enum):
     # OBJECT = "object"
 
 
+def _match_related_dict() -> dict:
+    # Make Mypy happy as match related is a dict[str, Any] | list[dict[str, Any]]
+    return {}
+
+
 class EventTrigger(BaseModel):
     events: set = Field(default_factory=set)
     match: dict[str, Any] = Field(default_factory=dict)
-    match_related: dict[str, Any] = Field(default_factory=dict)
+    match_related: dict[str, Any] | list[dict[str, Any]] = Field(default_factory=_match_related_dict)
 
     def get_prefect(self) -> PrefectEventTrigger:
         return PrefectEventTrigger(
@@ -39,10 +55,20 @@
             expect=self.events,
             within=timedelta(0),
             match=ResourceSpecification(self.match),
-            match_related=ResourceSpecification(self.match_related),
+            match_related=self.related_resource_specification,
             threshold=1,
         )
 
+    @property
+    def related_resource_specification(self) -> ResourceSpecification | list[ResourceSpecification]:
+        if isinstance(self.match_related, dict):
+            return ResourceSpecification(self.match_related)
+
+        if len(self.match_related) == 1:
+            return ResourceSpecification(self.match_related[0])
+
+        return [ResourceSpecification(related_match) for related_match in self.match_related]
+
 
 class ExecuteWorkflow(BaseModel):
     workflow: WorkflowDefinition
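
Since `match_related` now accepts either a single mapping or a list of mappings, the new `related_resource_specification` property decides what `get_prefect()` sends to Prefect: a dict or single-element list collapses to one `ResourceSpecification`, while a longer list is passed through as a list. A short sketch of that behavior, with illustrative event and resource values:

```python
# Illustrative sketch: an EventTrigger carrying two related-resource matchers.
trigger = EventTrigger(
    events={"infrahub.node.created"},
    match={"prefect.resource.role": "infrahub.node"},
    match_related=[
        {"prefect.resource.role": "infrahub.branch", "prefect.resource.label": ["main"]},
        {"prefect.resource.role": "infrahub.account", "infrahub.resource.id": ["account-id-1"]},
    ],
)

# With more than one entry the property returns a list of ResourceSpecification,
# which get_prefect() forwards as PrefectEventTrigger(match_related=...).
specs = trigger.related_resource_specification
assert isinstance(specs, list) and len(specs) == 2
```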
infrahub/trigger/setup.py CHANGED
@@ -5,27 +5,43 @@ from prefect.automations import AutomationCore
 from prefect.cache_policies import NONE
 from prefect.client.orchestration import PrefectClient
 from prefect.client.schemas.filters import DeploymentFilter, DeploymentFilterName
+from prefect.events.schemas.automations import Automation
 
 from infrahub.trigger.models import TriggerDefinition
 
-from .models import TriggerType
+from .models import TriggerSetupReport, TriggerType
 
 if TYPE_CHECKING:
     from uuid import UUID
 
 
+def compare_automations(target: AutomationCore, existing: Automation) -> bool:
+    """Compare an AutomationCore with an existing Automation object to identify if they are identical or not
+
+    Return True if the target is identical to the existing automation
+    """
+
+    target_dump = target.model_dump(exclude_defaults=True, exclude_none=True)
+    existing_dump = existing.model_dump(exclude_defaults=True, exclude_none=True, exclude={"id"})
+
+    return target_dump == existing_dump
+
+
 @task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)  # type: ignore[arg-type]
 async def setup_triggers(
     client: PrefectClient,
     triggers: list[TriggerDefinition],
     trigger_type: TriggerType | None = None,
-) -> None:
+    force_update: bool = False,
+) -> TriggerSetupReport:
     log = get_run_logger()
 
+    report = TriggerSetupReport()
+
     if trigger_type:
-        log.info(f"Setting up triggers of type {trigger_type.value}")
+        log.debug(f"Setting up triggers of type {trigger_type.value}")
     else:
-        log.info("Setting up all triggers")
+        log.debug("Setting up all triggers")
 
     # -------------------------------------------------------------
     # Retrieve existing Deployments and Automation from the server
@@ -38,23 +54,24 @@ async def setup_triggers(
         )
     }
     deployments_mapping: dict[str, UUID] = {name: item.id for name, item in deployments.items()}
-    existing_automations = {item.name: item for item in await client.read_automations()}
 
     # If a trigger type is provided, narrow down the list of existing triggers to know which one to delete
+    existing_automations: dict[str, Automation] = {}
     if trigger_type:
-        trigger_automations = [
-            item.name for item in await client.read_automations() if item.name.startswith(trigger_type.value)
-        ]
+        existing_automations = {
+            item.name: item for item in await client.read_automations() if item.name.startswith(trigger_type.value)
+        }
     else:
-        trigger_automations = [item.name for item in await client.read_automations()]
+        existing_automations = {item.name: item for item in await client.read_automations()}
 
     trigger_names = [trigger.generate_name() for trigger in triggers]
+    automation_names = list(existing_automations.keys())
 
-    log.debug(f"{len(trigger_automations)} existing triggers ({trigger_automations})")
-    log.debug(f"{len(trigger_names)} triggers to configure ({trigger_names})")
+    log.debug(f"{len(automation_names)} existing triggers ({automation_names})")
+    log.debug(f"{len(trigger_names)} triggers to configure ({trigger_names})")
 
-    to_delete = set(trigger_automations) - set(trigger_names)
-    log.debug(f"{len(trigger_names)} triggers to delete ({to_delete})")
+    to_delete = set(automation_names) - set(trigger_names)
+    log.debug(f"{len(to_delete)} triggers to delete ({to_delete})")
 
     # -------------------------------------------------------------
     # Create or Update all triggers
@@ -71,11 +88,16 @@ async def setup_triggers(
         existing_automation = existing_automations.get(trigger.generate_name(), None)
 
         if existing_automation:
-            await client.update_automation(automation_id=existing_automation.id, automation=automation)
-            log.info(f"{trigger.generate_name()} Updated")
+            if force_update or not compare_automations(target=automation, existing=existing_automation):
+                await client.update_automation(automation_id=existing_automation.id, automation=automation)
+                log.info(f"{trigger.generate_name()} Updated")
+                report.updated.append(trigger)
+            else:
+                report.unchanged.append(trigger)
         else:
             await client.create_automation(automation=automation)
             log.info(f"{trigger.generate_name()} Created")
+            report.created.append(trigger)
 
     # -------------------------------------------------------------
     # Delete Triggers that shouldn't be there
@@ -86,5 +108,19 @@ async def setup_triggers(
         if not existing_automation:
             continue
 
+        report.deleted.append(existing_automation)
         await client.delete_automation(automation_id=existing_automation.id)
         log.info(f"{item_to_delete} Deleted")
+
+    if trigger_type:
+        log.info(
+            f"Processed triggers of type {trigger_type.value}: "
+            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
+        )
+    else:
+        log.info(
+            f"Processed all triggers: "
+            f"{len(report.created)} created, {len(report.updated)} updated, {len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
+        )
+
+    return report
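
With these changes `setup_triggers` only calls `update_automation` when `compare_automations` detects drift (or when `force_update=True`) and returns a `TriggerSetupReport` summarizing the outcome. A minimal caller sketch, assuming the list of `TriggerDefinition` objects has already been gathered (the `reconcile_triggers` wrapper itself is illustrative, not part of the package):

```python
# Illustrative wrapper around the task shown above.
from prefect.client.orchestration import get_client

from infrahub.trigger.models import TriggerDefinition
from infrahub.trigger.setup import setup_triggers


async def reconcile_triggers(triggers: list[TriggerDefinition]) -> None:
    async with get_client(sync_client=False) as prefect_client:
        report = await setup_triggers(client=prefect_client, triggers=triggers)
        print(
            f"{len(report.created)} created, {len(report.updated)} updated, "
            f"{len(report.unchanged)} unchanged, {len(report.deleted)} deleted"
        )
```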
infrahub/trigger/tasks.py CHANGED
@@ -1,6 +1,7 @@
 from prefect import flow
 from prefect.client.orchestration import get_client
 
+from infrahub.actions.gather import gather_trigger_action_rules
 from infrahub.computed_attribute.gather import (
     gather_trigger_computed_attribute_jinja2,
     gather_trigger_computed_attribute_python,
@@ -21,17 +22,15 @@ async def trigger_configure_all(service: InfrahubServices) -> None:
         computed_attribute_python_triggers,
         computed_attribute_python_query_triggers,
     ) = await gather_trigger_computed_attribute_python(db=db)
-
+    action_rules = await gather_trigger_action_rules(db=db)
     triggers = (
         computed_attribute_j2_triggers
         + computed_attribute_python_triggers
         + computed_attribute_python_query_triggers
         + builtin_triggers
        + webhook_trigger
+        + action_rules
    )
 
     async with get_client(sync_client=False) as prefect_client:
-        await setup_triggers(
-            client=prefect_client,
-            triggers=triggers,
-        )
+        await setup_triggers(client=prefect_client, triggers=triggers, force_update=True)
infrahub/types.py CHANGED
@@ -366,7 +366,7 @@ ATTRIBUTE_PYTHON_TYPES: dict[str, type] = {
 ATTRIBUTE_KIND_LABELS = list(ATTRIBUTE_TYPES.keys())
 
 # Data types supporting large values, which can therefore not be indexed in neo4j.
-LARGE_ATTRIBUTE_TYPES = [TextArea, JSON]
+LARGE_ATTRIBUTE_TYPES = [TextArea, JSON, List]
 
 
 def get_attribute_type(kind: str = "Default") -> type[InfrahubDataType]:
@@ -3,6 +3,7 @@ from __future__ import annotations
 import base64
 import hashlib
 import hmac
+import json
 from typing import TYPE_CHECKING, Any
 from uuid import UUID, uuid4
 
@@ -170,7 +171,7 @@ class StandardWebhook(Webhook):
     def _assign_headers(self, uuid: UUID | None = None, at: Timestamp | None = None) -> None:
         message_id = f"msg_{uuid.hex}" if uuid else f"msg_{uuid4().hex}"
         timestamp = str(at.to_timestamp()) if at else str(Timestamp().to_timestamp())
-        payload = self._payload or {}
+        payload = json.dumps(self._payload or {})
         unsigned_data = f"{message_id}.{timestamp}.{payload}".encode()
         signature = self._sign(data=unsigned_data)
 
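
Serializing the payload with `json.dumps` before signing means the signed string contains the JSON body that is actually sent, rather than the `repr()` of a Python dict, which is what standard-webhooks style receivers verify against. A rough sketch of that signing pattern, assuming an HMAC-SHA256 signature over `message_id.timestamp.body` (this mirrors the logic above but is not the package's exact `_sign` implementation):

```python
import base64
import hashlib
import hmac
import json


def sign_standard_webhook(secret: bytes, message_id: str, timestamp: str, payload: dict) -> str:
    # The body must be serialized exactly once; the receiver verifies the same bytes.
    body = json.dumps(payload or {})
    unsigned_data = f"{message_id}.{timestamp}.{body}".encode()
    digest = hmac.new(secret, unsigned_data, hashlib.sha256).digest()
    return "v1," + base64.b64encode(digest).decode()
```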
@@ -6,6 +6,35 @@ from .models import WorkerPoolDefinition, WorkflowDefinition
 INFRAHUB_WORKER_POOL = WorkerPoolDefinition(name="infrahub-worker", description="Default Pool for internal tasks")
 
 
+ACTION_ADD_NODE_TO_GROUP = WorkflowDefinition(
+    name="action-add-node-to-group",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="add_node_to_group",
+)
+
+ACTION_RUN_GENERATOR = WorkflowDefinition(
+    name="action-run-generator",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="run_generator",
+)
+
+ACTION_RUN_GENERATOR_GROUP_EVENT = WorkflowDefinition(
+    name="action-run-generator-group-event",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="run_generator_group_event",
+)
+
+
+CONFIGURE_ACTION_RULES = WorkflowDefinition(
+    name="configure-action-rules",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="configure_action_rules",
+)
+
 TRANSFORM_JINJA2_RENDER = WorkflowDefinition(
     name="transform_render_jinja2_template",
     type=WorkflowType.USER,
@@ -66,6 +95,13 @@ IPAM_RECONCILIATION = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REMOVE_ADD_NODE_FROM_GROUP = WorkflowDefinition(
+    name="action-remove-node-from-group",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="remove_node_from_group",
+)
+
 REQUEST_GENERATOR_RUN = WorkflowDefinition(
     name="generator-run",
     type=WorkflowType.USER,
@@ -74,6 +110,14 @@ REQUEST_GENERATOR_RUN = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+RUN_GENERATOR_AS_CHECK = WorkflowDefinition(
+    name="run-generator-as-check",
+    type=WorkflowType.USER,
+    module="infrahub.proposed_change.tasks",
+    function="run_generator_as_check",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 REQUEST_GENERATOR_DEFINITION_RUN = WorkflowDefinition(
     name="request-generator-definition-run",
     type=WorkflowType.CORE,
@@ -82,6 +126,14 @@ REQUEST_GENERATOR_DEFINITION_RUN = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REQUEST_GENERATOR_DEFINITION_CHECK = WorkflowDefinition(
+    name="request-generator-definition-check",
+    type=WorkflowType.CORE,
+    module="infrahub.proposed_change.tasks",
+    function="request_generator_definition_check",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 REQUEST_ARTIFACT_GENERATE = WorkflowDefinition(
     name="artifact-generate",
     type=WorkflowType.CORE,  # NOTE need to check
@@ -251,6 +303,14 @@ COMPUTED_ATTRIBUTE_PROCESS_JINJA2 = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE = WorkflowDefinition(
+    name="computed-attribute-jinja2-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.computed_attribute.tasks",
+    function="computed_attribute_jinja2_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES = WorkflowDefinition(
     name="trigger_update_jinja2_computed_attributes",
     type=WorkflowType.CORE,
@@ -330,6 +390,21 @@ GIT_REPOSITORIES_IMPORT_OBJECTS = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REQUEST_PROPOSED_CHANGE_PIPELINE = WorkflowDefinition(
+    name="proposed-changed-pipeline",
+    type=WorkflowType.INTERNAL,
+    module="infrahub.proposed_change.tasks",
+    function="run_proposed_change_pipeline",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS = WorkflowDefinition(
+    name="proposed-changed-refresh-artifacts",
+    type=WorkflowType.INTERNAL,
+    module="infrahub.proposed_change.tasks",
+    function="refresh_artifacts",
+)
+
 REQUEST_PROPOSED_CHANGE_RUN_GENERATORS = WorkflowDefinition(
     name="proposed-changed-run-generator",
     type=WorkflowType.INTERNAL,
@@ -434,6 +509,9 @@ TRIGGER_CONFIGURE_ALL = WorkflowDefinition(
 worker_pools = [INFRAHUB_WORKER_POOL]
 
 workflows = [
+    ACTION_ADD_NODE_TO_GROUP,
+    ACTION_RUN_GENERATOR,
+    ACTION_RUN_GENERATOR_GROUP_EVENT,
     ANONYMOUS_TELEMETRY_SEND,
     BRANCH_CANCEL_PROPOSED_CHANGES,
     BRANCH_CREATE,
@@ -443,10 +521,12 @@ workflows = [
     BRANCH_MERGE_POST_PROCESS,
     BRANCH_REBASE,
     BRANCH_VALIDATE,
+    COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE,
     COMPUTED_ATTRIBUTE_PROCESS_JINJA2,
     COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM,
     COMPUTED_ATTRIBUTE_SETUP_JINJA2,
     COMPUTED_ATTRIBUTE_SETUP_PYTHON,
+    CONFIGURE_ACTION_RULES,
     DIFF_REFRESH,
     DIFF_REFRESH_ALL,
     DIFF_UPDATE,
@@ -468,16 +548,21 @@ workflows = [
     IPAM_RECONCILIATION,
     PROPOSED_CHANGE_MERGE,
     QUERY_COMPUTED_ATTRIBUTE_TRANSFORM_TARGETS,
+    REMOVE_ADD_NODE_FROM_GROUP,
     REQUEST_ARTIFACT_DEFINITION_CHECK,
     REQUEST_ARTIFACT_DEFINITION_GENERATE,
     REQUEST_ARTIFACT_GENERATE,
+    REQUEST_GENERATOR_DEFINITION_CHECK,
     REQUEST_GENERATOR_DEFINITION_RUN,
     REQUEST_GENERATOR_RUN,
     REQUEST_PROPOSED_CHANGE_DATA_INTEGRITY,
+    REQUEST_PROPOSED_CHANGE_PIPELINE,
+    REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS,
     REQUEST_PROPOSED_CHANGE_REPOSITORY_CHECKS,
     REQUEST_PROPOSED_CHANGE_RUN_GENERATORS,
     REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,
     REQUEST_PROPOSED_CHANGE_USER_TESTS,
+    RUN_GENERATOR_AS_CHECK,
     SCHEMA_APPLY_MIGRATION,
     SCHEMA_VALIDATE_MIGRATION,
     TRANSFORM_JINJA2_RENDER,
@@ -71,7 +71,5 @@ async def setup_task_manager() -> None:
         await setup_worker_pools(client=client)
         await setup_deployments(client=client)
         await setup_triggers(
-            client=client,
-            triggers=builtin_triggers,
-            trigger_type=TriggerType.BUILTIN,
+            client=client, triggers=builtin_triggers, trigger_type=TriggerType.BUILTIN, force_update=True
         )
infrahub_sdk/timestamp.py CHANGED
@@ -153,7 +153,7 @@ class Timestamp:
         nanoseconds: int = 0,
         disambiguate: Literal["compatible"] = "compatible",
     ) -> Timestamp:
-        return Timestamp(
+        return self.__class__(
             self._obj.add(
                 years=years,
                 months=months,
@@ -183,7 +183,7 @@ class Timestamp:
         nanoseconds: int = 0,
         disambiguate: Literal["compatible"] = "compatible",
     ) -> Timestamp:
-        return Timestamp(
+        return self.__class__(
             self._obj.subtract(
                 years=years,
                 months=months,
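
Returning `self.__class__(...)` instead of a hard-coded `Timestamp(...)` keeps `add()` and `subtract()` subclass-friendly: a subclass gets back an instance of its own type. A small sketch, assuming `Timestamp()` can be constructed without arguments as seen elsewhere in this diff (the subclass is hypothetical):

```python
from infrahub_sdk.timestamp import Timestamp


class AuditTimestamp(Timestamp):
    """Hypothetical subclass, used only to illustrate the change."""


later = AuditTimestamp().add(years=1)
# With `return self.__class__(...)` the result stays an AuditTimestamp;
# the previous `return Timestamp(...)` would have downcast it to Timestamp.
assert isinstance(later, AuditTimestamp)
```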
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: infrahub-server
-Version: 1.2.9rc0
+Version: 1.3.0a0
 Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
 Home-page: https://opsmill.com
 License: AGPL-3.0-only
@@ -39,7 +39,7 @@ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (==1.28.1)
 Requires-Dist: opentelemetry-exporter-otlp-proto-http (==1.28.1)
 Requires-Dist: opentelemetry-instrumentation-aio-pika (==0.49b1)
 Requires-Dist: opentelemetry-instrumentation-fastapi (==0.49b1)
-Requires-Dist: prefect (==3.3.7)
+Requires-Dist: prefect (==3.4.1)
 Requires-Dist: prefect-redis (==0.2.2)
 Requires-Dist: pyarrow (>=14,<15)
 Requires-Dist: pydantic (>=2.10,<2.11)
@@ -77,7 +77,7 @@ Description-Content-Type: text/markdown
 
 Infrahub from [OpsMill](https://opsmill.com) is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run. Infrahub offers a central hub to manage the data, templates and playbooks that powers your infrastructure by combining the version control and branch management capabilities similar to Git with the flexible data model and UI of a graph database.
 
-If you just want to try Infrahub out, you can use our [Always-On Sandbox](https://demo.infrahub.app/) to get started.
+If you just want to try Infrahub out, you can use our [Infrahub Sandbox](https://sandbox.infrahub.app/) to get started.
 
 ![infrahub screenshot](docs/docs/media/infrahub-readme.gif)
 
@@ -103,7 +103,7 @@ If you just want to try Infrahub out, you can use our [Always-On Sandbox](https:
 
 ## Quick Start
 
-[Always-On Sandbox](https://demo.infrahub.app/) - Instantly login to the UI of a demo environment of Infrahub with sample data pre-loaded.
+[Infrahub Sandbox](https://sandbox.infrahub.app/) - Instantly login to the UI of a demo environment of Infrahub with sample data pre-loaded.
 
 [Getting Started Environment & Tutorial](https://opsmill.instruqt.com/pages/labs) - It spins up an instance of Infrahub on our cloud, provides a browser, terminal, code editor and walks you through the basic concepts: