infrahub-server 1.4.10__py3-none-any.whl → 1.5.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. infrahub/actions/tasks.py +208 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/diff/diff.py +1 -1
  4. infrahub/api/query.py +2 -0
  5. infrahub/api/schema.py +3 -0
  6. infrahub/auth.py +5 -5
  7. infrahub/cli/db.py +26 -2
  8. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  9. infrahub/config.py +7 -2
  10. infrahub/core/attribute.py +25 -22
  11. infrahub/core/branch/models.py +2 -2
  12. infrahub/core/branch/needs_rebase_status.py +11 -0
  13. infrahub/core/branch/tasks.py +4 -3
  14. infrahub/core/changelog/models.py +4 -12
  15. infrahub/core/constants/__init__.py +1 -0
  16. infrahub/core/constants/infrahubkind.py +1 -0
  17. infrahub/core/convert_object_type/object_conversion.py +201 -0
  18. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  19. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  20. infrahub/core/diff/model/path.py +4 -0
  21. infrahub/core/diff/payload_builder.py +1 -1
  22. infrahub/core/diff/query/artifact.py +1 -1
  23. infrahub/core/graph/__init__.py +1 -1
  24. infrahub/core/initialization.py +2 -2
  25. infrahub/core/ipam/utilization.py +1 -1
  26. infrahub/core/manager.py +9 -84
  27. infrahub/core/migrations/graph/__init__.py +6 -0
  28. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +166 -0
  29. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
  30. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
  31. infrahub/core/migrations/schema/node_attribute_add.py +5 -2
  32. infrahub/core/migrations/shared.py +5 -6
  33. infrahub/core/node/__init__.py +165 -42
  34. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  35. infrahub/core/node/create.py +67 -35
  36. infrahub/core/node/lock_utils.py +98 -0
  37. infrahub/core/node/node_property_attribute.py +230 -0
  38. infrahub/core/node/standard.py +1 -1
  39. infrahub/core/property.py +11 -0
  40. infrahub/core/protocols.py +8 -1
  41. infrahub/core/query/attribute.py +27 -15
  42. infrahub/core/query/node.py +61 -185
  43. infrahub/core/query/relationship.py +43 -26
  44. infrahub/core/query/subquery.py +0 -8
  45. infrahub/core/registry.py +2 -2
  46. infrahub/core/relationship/constraints/count.py +1 -1
  47. infrahub/core/relationship/model.py +60 -20
  48. infrahub/core/schema/attribute_schema.py +0 -2
  49. infrahub/core/schema/basenode_schema.py +42 -2
  50. infrahub/core/schema/definitions/core/__init__.py +2 -0
  51. infrahub/core/schema/definitions/core/generator.py +2 -0
  52. infrahub/core/schema/definitions/core/group.py +16 -2
  53. infrahub/core/schema/definitions/core/repository.py +7 -0
  54. infrahub/core/schema/definitions/internal.py +14 -1
  55. infrahub/core/schema/generated/base_node_schema.py +6 -1
  56. infrahub/core/schema/node_schema.py +5 -2
  57. infrahub/core/schema/relationship_schema.py +0 -1
  58. infrahub/core/schema/schema_branch.py +137 -2
  59. infrahub/core/schema/schema_branch_display.py +123 -0
  60. infrahub/core/schema/schema_branch_hfid.py +114 -0
  61. infrahub/core/validators/aggregated_checker.py +1 -1
  62. infrahub/core/validators/determiner.py +12 -1
  63. infrahub/core/validators/relationship/peer.py +1 -1
  64. infrahub/core/validators/tasks.py +1 -1
  65. infrahub/display_labels/__init__.py +0 -0
  66. infrahub/display_labels/gather.py +48 -0
  67. infrahub/display_labels/models.py +240 -0
  68. infrahub/display_labels/tasks.py +186 -0
  69. infrahub/display_labels/triggers.py +22 -0
  70. infrahub/events/group_action.py +1 -1
  71. infrahub/events/node_action.py +1 -1
  72. infrahub/generators/constants.py +7 -0
  73. infrahub/generators/models.py +38 -12
  74. infrahub/generators/tasks.py +34 -16
  75. infrahub/git/base.py +38 -1
  76. infrahub/git/integrator.py +22 -14
  77. infrahub/graphql/analyzer.py +1 -1
  78. infrahub/graphql/api/dependencies.py +2 -4
  79. infrahub/graphql/api/endpoints.py +2 -2
  80. infrahub/graphql/app.py +2 -4
  81. infrahub/graphql/initialization.py +2 -3
  82. infrahub/graphql/manager.py +212 -137
  83. infrahub/graphql/middleware.py +12 -0
  84. infrahub/graphql/mutations/branch.py +11 -0
  85. infrahub/graphql/mutations/computed_attribute.py +110 -3
  86. infrahub/graphql/mutations/convert_object_type.py +34 -13
  87. infrahub/graphql/mutations/display_label.py +111 -0
  88. infrahub/graphql/mutations/generator.py +25 -7
  89. infrahub/graphql/mutations/hfid.py +118 -0
  90. infrahub/graphql/mutations/ipam.py +21 -8
  91. infrahub/graphql/mutations/main.py +37 -153
  92. infrahub/graphql/mutations/profile.py +195 -0
  93. infrahub/graphql/mutations/proposed_change.py +2 -1
  94. infrahub/graphql/mutations/relationship.py +2 -2
  95. infrahub/graphql/mutations/repository.py +22 -83
  96. infrahub/graphql/mutations/resource_manager.py +2 -2
  97. infrahub/graphql/mutations/schema.py +5 -5
  98. infrahub/graphql/mutations/webhook.py +1 -1
  99. infrahub/graphql/queries/resource_manager.py +1 -1
  100. infrahub/graphql/registry.py +173 -0
  101. infrahub/graphql/resolvers/resolver.py +2 -0
  102. infrahub/graphql/schema.py +8 -1
  103. infrahub/groups/tasks.py +1 -1
  104. infrahub/hfid/__init__.py +0 -0
  105. infrahub/hfid/gather.py +48 -0
  106. infrahub/hfid/models.py +240 -0
  107. infrahub/hfid/tasks.py +185 -0
  108. infrahub/hfid/triggers.py +22 -0
  109. infrahub/lock.py +67 -30
  110. infrahub/locks/__init__.py +0 -0
  111. infrahub/locks/tasks.py +37 -0
  112. infrahub/middleware.py +26 -1
  113. infrahub/patch/plan_writer.py +2 -2
  114. infrahub/profiles/__init__.py +0 -0
  115. infrahub/profiles/node_applier.py +101 -0
  116. infrahub/profiles/queries/__init__.py +0 -0
  117. infrahub/profiles/queries/get_profile_data.py +99 -0
  118. infrahub/profiles/tasks.py +63 -0
  119. infrahub/proposed_change/tasks.py +10 -1
  120. infrahub/repositories/__init__.py +0 -0
  121. infrahub/repositories/create_repository.py +113 -0
  122. infrahub/server.py +16 -3
  123. infrahub/services/__init__.py +8 -5
  124. infrahub/tasks/registry.py +6 -4
  125. infrahub/trigger/catalogue.py +4 -0
  126. infrahub/trigger/models.py +2 -0
  127. infrahub/trigger/tasks.py +3 -0
  128. infrahub/webhook/models.py +1 -1
  129. infrahub/workflows/catalogue.py +110 -3
  130. infrahub/workflows/initialization.py +16 -0
  131. infrahub/workflows/models.py +17 -2
  132. infrahub_sdk/branch.py +5 -8
  133. infrahub_sdk/checks.py +1 -1
  134. infrahub_sdk/client.py +364 -84
  135. infrahub_sdk/convert_object_type.py +61 -0
  136. infrahub_sdk/ctl/check.py +2 -3
  137. infrahub_sdk/ctl/cli_commands.py +18 -12
  138. infrahub_sdk/ctl/config.py +8 -2
  139. infrahub_sdk/ctl/generator.py +6 -3
  140. infrahub_sdk/ctl/graphql.py +184 -0
  141. infrahub_sdk/ctl/repository.py +39 -1
  142. infrahub_sdk/ctl/schema.py +18 -3
  143. infrahub_sdk/ctl/utils.py +4 -0
  144. infrahub_sdk/ctl/validate.py +5 -3
  145. infrahub_sdk/diff.py +4 -5
  146. infrahub_sdk/exceptions.py +2 -0
  147. infrahub_sdk/generator.py +7 -1
  148. infrahub_sdk/graphql/__init__.py +12 -0
  149. infrahub_sdk/graphql/constants.py +1 -0
  150. infrahub_sdk/graphql/plugin.py +85 -0
  151. infrahub_sdk/graphql/query.py +77 -0
  152. infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
  153. infrahub_sdk/graphql/utils.py +40 -0
  154. infrahub_sdk/node/attribute.py +2 -0
  155. infrahub_sdk/node/node.py +28 -20
  156. infrahub_sdk/playback.py +1 -2
  157. infrahub_sdk/protocols.py +54 -6
  158. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  159. infrahub_sdk/pytest_plugin/utils.py +40 -0
  160. infrahub_sdk/repository.py +1 -2
  161. infrahub_sdk/schema/__init__.py +38 -0
  162. infrahub_sdk/schema/main.py +1 -0
  163. infrahub_sdk/schema/repository.py +8 -0
  164. infrahub_sdk/spec/object.py +120 -7
  165. infrahub_sdk/spec/range_expansion.py +118 -0
  166. infrahub_sdk/timestamp.py +18 -6
  167. infrahub_sdk/transforms.py +1 -1
  168. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +9 -11
  169. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +177 -134
  170. infrahub_testcontainers/container.py +1 -1
  171. infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
  172. infrahub_testcontainers/docker-compose.test.yml +1 -1
  173. infrahub_testcontainers/models.py +2 -2
  174. infrahub_testcontainers/performance_test.py +4 -4
  175. infrahub/core/convert_object_type/conversion.py +0 -134
  176. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
  177. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
  178. {infrahub_server-1.4.10.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub/display_labels/tasks.py ADDED
@@ -0,0 +1,186 @@
+ from __future__ import annotations
+
+ from typing import cast
+
+ from infrahub_sdk.template import Jinja2Template
+ from prefect import flow
+ from prefect.logging import get_run_logger
+
+ from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
+ from infrahub.core.registry import registry
+ from infrahub.events import BranchDeletedEvent
+ from infrahub.trigger.models import TriggerSetupReport, TriggerType
+ from infrahub.trigger.setup import setup_triggers_specific
+ from infrahub.workers.dependencies import get_client, get_component, get_database, get_workflow
+ from infrahub.workflows.catalogue import DISPLAY_LABELS_PROCESS_JINJA2, TRIGGER_UPDATE_DISPLAY_LABELS
+ from infrahub.workflows.utils import add_tags, wait_for_schema_to_converge
+
+ from .gather import gather_trigger_display_labels_jinja2
+ from .models import DisplayLabelJinja2GraphQL, DisplayLabelJinja2GraphQLResponse, DisplayLabelTriggerDefinition
+
+ UPDATE_DISPLAY_LABEL = """
+ mutation UpdateDisplayLabel(
+     $id: String!,
+     $kind: String!,
+     $value: String!
+ ) {
+     InfrahubUpdateDisplayLabel(
+         data: {id: $id, value: $value, kind: $kind}
+     ) {
+         ok
+     }
+ }
+ """
+
+
+ @flow(
+     name="display-label-jinja2-update-value",
+     flow_run_name="Update value for display_label on {node_kind}",
+ )
+ async def display_label_jinja2_update_value(
+     branch_name: str,
+     obj: DisplayLabelJinja2GraphQLResponse,
+     node_kind: str,
+     template: Jinja2Template,
+ ) -> None:
+     log = get_run_logger()
+     client = get_client()
+
+     await add_tags(branches=[branch_name], nodes=[obj.node_id], db_change=True)
+
+     value = await template.render(variables=obj.variables)
+     if value == obj.display_label_value:
+         log.debug(f"Ignoring to update {obj} with existing value on display_label={value}")
+         return
+
+     await client.execute_graphql(
+         query=UPDATE_DISPLAY_LABEL,
+         variables={"id": obj.node_id, "kind": node_kind, "value": value},
+         branch_name=branch_name,
+     )
+     log.info(f"Updating {node_kind}.display_label='{value}' ({obj.node_id})")
+
+
+ @flow(
+     name="display-label-process-jinja2",
+     flow_run_name="Process display_labels for {target_kind}",
+ )
+ async def process_display_label(
+     branch_name: str,
+     node_kind: str,
+     object_id: str,
+     target_kind: str,
+     context: InfrahubContext,  # noqa: ARG001
+ ) -> None:
+     log = get_run_logger()
+     client = get_client()
+
+     await add_tags(branches=[branch_name])
+
+     target_schema = branch_name if branch_name in registry.get_altered_schema_branches() else registry.default_branch
+     schema_branch = registry.schema.get_schema_branch(name=target_schema)
+     node_schema = schema_branch.get_node(name=target_kind, duplicate=False)
+
+     if node_kind == target_kind:
+         display_label_template = schema_branch.display_labels.get_template_node(kind=node_kind)
+     else:
+         display_label_template = schema_branch.display_labels.get_related_template(
+             related_kind=node_kind, target_kind=target_kind
+         )
+
+     jinja_template = Jinja2Template(template=display_label_template.template)
+     variables = jinja_template.get_variables()
+     display_label_graphql = DisplayLabelJinja2GraphQL(
+         node_schema=node_schema, variables=variables, filter_key=display_label_template.filter_key
+     )
+
+     query = display_label_graphql.render_graphql_query(filter_id=object_id)
+     response = await client.execute_graphql(query=query, branch_name=branch_name)
+     update_candidates = display_label_graphql.parse_response(response=response)
+
+     if not update_candidates:
+         log.debug("No nodes found that requires updates")
+         return
+
+     batch = await client.create_batch()
+     for node in update_candidates:
+         batch.add(
+             task=display_label_jinja2_update_value,
+             branch_name=branch_name,
+             obj=node,
+             node_kind=node_schema.kind,
+             template=jinja_template,
+         )
+
+     _ = [response async for _, response in batch.execute()]
+
+
+ @flow(name="display-labels-setup-jinja2", flow_run_name="Setup display labels in task-manager")
+ async def display_labels_setup_jinja2(
+     context: InfrahubContext, branch_name: str | None = None, event_name: str | None = None
+ ) -> None:
+     database = await get_database()
+     async with database.start_session() as db:
+         log = get_run_logger()
+
+         if branch_name:
+             await add_tags(branches=[branch_name])
+             component = await get_component()
+             await wait_for_schema_to_converge(branch_name=branch_name, component=component, db=db, log=log)
+
+         report: TriggerSetupReport = await setup_triggers_specific(
+             gatherer=gather_trigger_display_labels_jinja2, trigger_type=TriggerType.DISPLAY_LABEL_JINJA2
+         )  # type: ignore[misc]
+
+         # Configure all DisplayLabelTriggerDefinitions in Prefect
+         display_reports = [cast(DisplayLabelTriggerDefinition, entry) for entry in report.updated + report.created]
+         direct_target_triggers = [display_report for display_report in display_reports if display_report.target_kind]
+
+         for display_report in direct_target_triggers:
+             if event_name != BranchDeletedEvent.event_name and display_report.branch == branch_name:
+                 await get_workflow().submit_workflow(
+                     workflow=TRIGGER_UPDATE_DISPLAY_LABELS,
+                     context=context,
+                     parameters={
+                         "branch_name": display_report.branch,
+                         "kind": display_report.target_kind,
+                     },
+                 )
+
+         log.info(f"{report.in_use_count} Display labels for Jinja2 automation configuration completed")
+
+
+ @flow(
+     name="trigger-update-display-labels",
+     flow_run_name="Trigger updates for display labels for kind",
+ )
+ async def trigger_update_display_labels(
+     branch_name: str,
+     kind: str,
+     context: InfrahubContext,
+ ) -> None:
+     await add_tags(branches=[branch_name])
+
+     client = get_client()
+
+     # NOTE we only need the id of the nodes, this query will still query for the HFID
+     node_schema = registry.schema.get_node_schema(name=kind, branch=branch_name)
+     nodes = await client.all(
+         kind=kind,
+         branch=branch_name,
+         exclude=node_schema.attribute_names + node_schema.relationship_names,
+         populate_store=False,
+     )
+
+     for node in nodes:
+         await get_workflow().submit_workflow(
+             workflow=DISPLAY_LABELS_PROCESS_JINJA2,
+             context=context,
+             parameters={
+                 "branch_name": branch_name,
+                 "node_kind": kind,
+                 "target_kind": kind,
+                 "object_id": node.id,
+                 "context": context,
+             },
+         )
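The flows above reduce to a simple loop: render a Jinja2 template against variables returned by a GraphQL query, skip the node when the rendered value already matches the stored display_label, and otherwise call the InfrahubUpdateDisplayLabel mutation. A minimal standalone sketch of that comparison step, using plain jinja2 and hypothetical attribute names rather than the package's Jinja2Template wrapper:

from jinja2 import Environment

def render_display_label(template: str, variables: dict, current_value: str | None) -> str | None:
    """Render a display-label template; return None when no update is needed."""
    rendered = Environment().from_string(template).render(**variables)
    if rendered == current_value:
        return None  # value unchanged, the GraphQL mutation can be skipped
    return rendered

# Hypothetical example: a device label built from two attribute values.
print(render_display_label(
    template="{{ name__value }} ({{ device_type__value }})",
    variables={"name__value": "sw-01", "device_type__value": "switch"},
    current_value=None,
))  # -> "sw-01 (switch)"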
infrahub/display_labels/triggers.py ADDED
@@ -0,0 +1,22 @@
+ from infrahub.events.branch_action import BranchDeletedEvent
+ from infrahub.events.schema_action import SchemaUpdatedEvent
+ from infrahub.trigger.models import BuiltinTriggerDefinition, EventTrigger, ExecuteWorkflow
+ from infrahub.workflows.catalogue import DISPLAY_LABELS_SETUP_JINJA2
+
+ TRIGGER_DISPLAY_LABELS_ALL_SCHEMA = BuiltinTriggerDefinition(
+     name="display-labels-setup-all",
+     trigger=EventTrigger(events={SchemaUpdatedEvent.event_name, BranchDeletedEvent.event_name}),
+     actions=[
+         ExecuteWorkflow(
+             workflow=DISPLAY_LABELS_SETUP_JINJA2,
+             parameters={
+                 "branch_name": "{{ event.resource['infrahub.branch.name'] }}",
+                 "event_name": "{{ event.event }}",
+                 "context": {
+                     "__prefect_kind": "json",
+                     "value": {"__prefect_kind": "jinja", "template": "{{ event.payload['context'] | tojson }}"},
+                 },
+             },
+         ),
+     ],
+ )
infrahub/events/group_action.py CHANGED
@@ -22,7 +22,7 @@ class GroupMutatedEvent(InfrahubEvent):
      def get_related(self) -> list[dict[str, str]]:
          related = super().get_related()

-         if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
+         if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GENERATORAWAREGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
              # Temporary workaround to avoid too large payloads for the related field
              return related

infrahub/events/node_action.py CHANGED
@@ -24,7 +24,7 @@ class NodeMutatedEvent(InfrahubEvent):

      def get_related(self) -> list[dict[str, str]]:
          related = super().get_related()
-         if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
+         if self.kind in [InfrahubKind.GENERATORGROUP, InfrahubKind.GENERATORAWAREGROUP, InfrahubKind.GRAPHQLQUERYGROUP]:
              # Temporary workaround to avoid too large payloads for the related field
              return related

infrahub/generators/constants.py ADDED
@@ -0,0 +1,7 @@
+ from enum import Enum
+
+
+ class GeneratorDefinitionRunSource(Enum):
+     PROPOSED_CHANGE = "proposed_change"
+     MERGE = "merge"
+     UNKNOWN = "unknown"
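This enum is consumed by run_generator_definition (see infrahub/generators/tasks.py below) to decide which generator definitions run for a given trigger. A hedged sketch of that gating logic, with the per-definition flags passed in explicitly instead of being read from generator nodes:

from infrahub.generators.constants import GeneratorDefinitionRunSource  # new module added in this release

def should_run(source: GeneratorDefinitionRunSource, execute_in_proposed_change: bool, execute_after_merge: bool) -> bool:
    """Mirror the filtering added to run_generator_definition in this diff."""
    if source is GeneratorDefinitionRunSource.PROPOSED_CHANGE:
        return execute_in_proposed_change
    if source is GeneratorDefinitionRunSource.MERGE:
        return execute_after_merge
    return True  # UNKNOWN keeps the previous behaviour: run every definition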
infrahub/generators/models.py CHANGED
@@ -6,7 +6,7 @@ from pydantic import BaseModel, ConfigDict, Field
  class RequestGeneratorRun(BaseModel):
      """Runs a generator."""

-     generator_definition: ProposedChangeGeneratorDefinition = Field(..., description="The Generator definition")
+     generator_definition: GeneratorDefinitionModel = Field(..., description="The Generator definition")
      generator_instance: str | None = Field(
          default=None, description="The id of the generator instance if it previously existed"
      )
@@ -31,14 +31,40 @@ class RequestGeneratorDefinitionRun(BaseModel):
      target_members: list[str] = Field(default_factory=list, description="List of targets to run the generator for")


- class ProposedChangeGeneratorDefinition(BaseModel):
-     definition_id: str
-     definition_name: str
-     query_name: str
-     convert_query_response: bool
-     query_models: list[str]
-     repository_id: str
-     class_name: str
-     file_path: str
-     parameters: dict
-     group_id: str
+ class GeneratorDefinitionModel(BaseModel):
+     definition_id: str = Field(..., description="The id of the generator definition.")
+     definition_name: str = Field(..., description="The name of the generator definition.")
+     query_name: str = Field(..., description="The name of the query to use when collecting data.")
+     convert_query_response: bool = Field(
+         ...,
+         description="Decide if the generator should convert the result of the GraphQL query to SDK InfrahubNode objects.",
+     )
+     class_name: str = Field(..., description="The name of the generator class to run.")
+     file_path: str = Field(..., description="The file path of the generator in the repository.")
+     group_id: str = Field(..., description="The group to target when running this generator")
+     parameters: dict = Field(..., description="The input parameters required to run this check")
+
+     execute_in_proposed_change: bool = Field(
+         ..., description="Indicates if the generator should execute in a proposed change."
+     )
+     execute_after_merge: bool = Field(..., description="Indicates if the generator should execute after a merge.")
+
+     @classmethod
+     def from_pc_generator_definition(cls, model: ProposedChangeGeneratorDefinition) -> GeneratorDefinitionModel:
+         return GeneratorDefinitionModel(
+             definition_id=model.definition_id,
+             definition_name=model.definition_name,
+             query_name=model.query_name,
+             convert_query_response=model.convert_query_response,
+             class_name=model.class_name,
+             file_path=model.file_path,
+             group_id=model.group_id,
+             parameters=model.parameters,
+             execute_in_proposed_change=model.execute_in_proposed_change,
+             execute_after_merge=model.execute_after_merge,
+         )
+
+
+ class ProposedChangeGeneratorDefinition(GeneratorDefinitionModel):
+     query_models: list[str] = Field(..., description="The models to use when collecting data.")
+     repository_id: str = Field(..., description="The id of the repository.")
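For illustration only, constructing the new model directly with made-up values highlights the two execution flags that now control where a generator runs; every other value below is hypothetical:

from infrahub.generators.models import GeneratorDefinitionModel

definition = GeneratorDefinitionModel(
    definition_id="1812f4aa-0000-0000-0000-000000000000",  # hypothetical id
    definition_name="cabling_generator",                   # hypothetical name
    query_name="cabling_query",
    convert_query_response=True,
    class_name="CablingGenerator",
    file_path="generators/cabling.py",
    group_id="1812f4ab-0000-0000-0000-000000000000",       # hypothetical group id
    parameters={"name": "name__value"},
    execute_in_proposed_change=True,   # run while the proposed change is open
    execute_after_merge=False,         # skip once the branch is merged
)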
infrahub/generators/tasks.py CHANGED
@@ -14,7 +14,9 @@ from prefect.states import Completed, Failed
  from infrahub import lock
  from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
  from infrahub.core.constants import GeneratorInstanceStatus, InfrahubKind
+ from infrahub.generators.constants import GeneratorDefinitionRunSource
  from infrahub.generators.models import (
+     GeneratorDefinitionModel,
      ProposedChangeGeneratorDefinition,
      RequestGeneratorDefinitionRun,
      RequestGeneratorRun,
@@ -52,9 +54,12 @@ async def run_generator(model: RequestGeneratorRun) -> None:
          name=model.generator_definition.definition_name,
          class_name=model.generator_definition.class_name,
          file_path=model.generator_definition.file_path,
+         parameters=model.generator_definition.parameters,
          query=model.generator_definition.query_name,
          targets=model.generator_definition.group_id,
          convert_query_response=model.generator_definition.convert_query_response,
+         execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+         execute_after_merge=model.generator_definition.execute_after_merge,
      )

      commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -78,6 +83,8 @@ async def run_generator(model: RequestGeneratorRun) -> None:
          params=model.variables,
          generator_instance=generator_instance.id,
          convert_query_response=generator_definition.convert_query_response,
+         execute_in_proposed_change=generator_definition.execute_in_proposed_change,
+         execute_after_merge=generator_definition.execute_after_merge,
          infrahub_node=InfrahubNode,
      )
      await generator.run(identifier=generator_definition.name)
@@ -127,28 +134,39 @@ async def _define_instance(model: RequestGeneratorRun, client: InfrahubClient) -


  @flow(name="generator-definition-run", flow_run_name="Run all generators")
- async def run_generator_definition(branch: str, context: InfrahubContext) -> None:
+ async def run_generator_definition(
+     branch: str, context: InfrahubContext, source: GeneratorDefinitionRunSource = GeneratorDefinitionRunSource.UNKNOWN
+ ) -> None:
      await add_tags(branches=[branch])

      generators = await get_client().filters(
          kind=InfrahubKind.GENERATORDEFINITION, prefetch_relationships=True, populate_store=True, branch=branch
      )

-     generator_definitions = [
-         ProposedChangeGeneratorDefinition(
-             definition_id=generator.id,
-             definition_name=generator.name.value,
-             class_name=generator.class_name.value,
-             file_path=generator.file_path.value,
-             query_name=generator.query.peer.name.value,
-             query_models=generator.query.peer.models.value,
-             repository_id=generator.repository.peer.id,
-             parameters=generator.parameters.value,
-             group_id=generator.targets.peer.id,
-             convert_query_response=generator.convert_query_response.value,
+     generator_definitions: list[ProposedChangeGeneratorDefinition] = []
+
+     for generator in generators:
+         if (
+             source == GeneratorDefinitionRunSource.PROPOSED_CHANGE and not generator.execute_in_proposed_change.value
+         ) or (source == GeneratorDefinitionRunSource.MERGE and not generator.execute_after_merge.value):
+             continue
+
+         generator_definitions.append(
+             ProposedChangeGeneratorDefinition(
+                 definition_id=generator.id,
+                 definition_name=generator.name.value,
+                 class_name=generator.class_name.value,
+                 file_path=generator.file_path.value,
+                 query_name=generator.query.peer.name.value,
+                 query_models=generator.query.peer.models.value,
+                 repository_id=generator.repository.peer.id,
+                 parameters=generator.parameters.value,
+                 group_id=generator.targets.peer.id,
+                 convert_query_response=generator.convert_query_response.value,
+                 execute_in_proposed_change=generator.execute_in_proposed_change.value,
+                 execute_after_merge=generator.execute_after_merge.value,
+             )
          )
-         for generator in generators
-     ]

      for generator_definition in generator_definitions:
          model = RequestGeneratorDefinitionRun(branch=branch, generator_definition=generator_definition)
@@ -213,7 +231,7 @@ async def request_generator_definition_run(

          generator_instance = instance_by_member.get(member.id)
          request_generator_run_model = RequestGeneratorRun(
-             generator_definition=model.generator_definition,
+             generator_definition=GeneratorDefinitionModel.from_pc_generator_definition(model.generator_definition),
              commit=repository.commit.value,
              generator_instance=generator_instance,
              repository_id=repository.id,
infrahub/git/base.py CHANGED
@@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, NoReturn
  from uuid import UUID  # noqa: TC003

  import git
- from git import Blob, Repo
+ from git import BadName, Blob, Repo
  from git.exc import GitCommandError, InvalidGitRepositoryError
  from git.refs.remote import RemoteReference
  from infrahub_sdk import InfrahubClient  # noqa: TC002
@@ -67,6 +67,15 @@ class RepoFileInformation(BaseModel):
      """Extension of the file Example: py """


+ class RepoChangedFiles(BaseModel):
+     added: list[str] = Field(default_factory=list)
+     copied: list[tuple[str, str]] = Field(default_factory=list)
+     deleted: list[str] = Field(default_factory=list)
+     renamed: list[tuple[str, str]] = Field(default_factory=list)
+     modified: list[str] = Field(default_factory=list)
+     type_changed: list[tuple[str, str]] = Field(default_factory=list)
+
+
  def extract_repo_file_information(
      full_filename: Path, repo_directory: Path, worktree_directory: Path | None = None
  ) -> RepoFileInformation:
@@ -970,3 +979,31 @@ class InfrahubRepositoryBase(BaseModel, ABC):
          if branch_name == self.default_branch and branch_name != registry.default_branch:
              return registry.default_branch
          return branch_name
+
+     def get_changed_files(self, first_commit: str, second_commit: str | None = None) -> RepoChangedFiles:
+         """Return the changes between two commits in this repo."""
+         changes = RepoChangedFiles()
+         repo = self.get_git_repo_main()
+
+         try:
+             commit_a = repo.commit(first_commit)
+             commit_b = repo.commit(second_commit) if second_commit else repo.head.commit
+         except BadName as exc:
+             raise CommitNotFoundError(identifier=str(self.id), commit=exc.args[0]) from exc
+
+         for diff in commit_a.diff(commit_b):
+             match diff.change_type:
+                 case "A":
+                     changes.added.append(diff.b_path)
+                 case "C":
+                     changes.copied.append((diff.a_path, diff.b_path))
+                 case "D":
+                     changes.deleted.append(diff.a_path)
+                 case "R":
+                     changes.renamed.append((diff.a_path, diff.b_path))
+                 case "M":
+                     changes.modified.append(diff.b_path)
+                 case "T":
+                     changes.type_changed.append((diff.a_path, diff.b_path))
+
+         return changes
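The new get_changed_files method is a thin wrapper around GitPython commit diffing. A standalone sketch of the same idea, assuming a local clone at repo_path; the function and bucket names here are illustrative, not part of the package:

from git import Repo

def changed_files(repo_path: str, first_commit: str, second_commit: str | None = None) -> dict[str, list]:
    """Bucket files touched between two commits by GitPython change_type codes."""
    repo = Repo(repo_path)
    commit_a = repo.commit(first_commit)
    commit_b = repo.commit(second_commit) if second_commit else repo.head.commit
    buckets: dict[str, list] = {"added": [], "deleted": [], "modified": [], "renamed": []}
    for diff in commit_a.diff(commit_b):
        if diff.change_type == "A":        # file added in commit_b
            buckets["added"].append(diff.b_path)
        elif diff.change_type == "D":      # file deleted
            buckets["deleted"].append(diff.a_path)
        elif diff.change_type == "M":      # file modified in place
            buckets["modified"].append(diff.b_path)
        elif diff.change_type == "R":      # file renamed
            buckets["renamed"].append((diff.a_path, diff.b_path))
    return buckets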
infrahub/git/integrator.py CHANGED
@@ -226,7 +226,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              )
          )

-     @task(name="import-jinja2-tansforms", task_run_name="Import Jinja2 transform", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-jinja2-transforms", task_run_name="Import Jinja2 transform", cache_policy=NONE)
      async def import_jinja2_transforms(
          self,
          branch_name: str,
@@ -331,7 +331,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):

          await existing_transform.save()

-     @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)
      async def import_artifact_definitions(
          self,
          branch_name: str,
@@ -432,7 +432,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):

          await existing_artifact_definition.save()

-     @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)
      async def get_repository_config(self, branch_name: str, commit: str) -> InfrahubRepositoryConfig | None:
          branch_wt = self.get_worktree(identifier=commit or branch_name)
          log = get_run_logger()
@@ -469,7 +469,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              log.error(f"Unable to load the configuration file {config_file_name}, the format is not valid : {exc}")
              return None

-     @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)
      async def import_schema_files(self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig) -> None:
          log = get_run_logger()
          branch_wt = self.get_worktree(identifier=commit or branch_name)
@@ -541,7 +541,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
          for schema_file in schemas_data:
              log.info(f"schema '{schema_file.identifier}' loaded successfully!")

-     @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)
      async def import_all_graphql_query(
          self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
      ) -> None:
@@ -599,7 +599,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
          await obj.save()
          return obj

-     @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)
      async def import_python_check_definitions(
          self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
      ) -> None:
@@ -670,7 +670,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              log.info(f"CheckDefinition '{check_name!r}' not found locally, deleting")
              await check_definition_in_graph[check_name].delete()

-     @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)
      async def import_generator_definitions(
          self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
      ) -> None:
@@ -756,11 +756,13 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              or existing_generator.parameters.value != generator.parameters
              or existing_generator.convert_query_response.value != generator.convert_query_response
              or existing_generator.targets.id != generator.targets
+             or existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change
+             or existing_generator.execute_after_merge.value != generator.execute_after_merge
          ):
              return True
          return False

-     @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)
      async def import_python_transforms(
          self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
      ) -> None:
@@ -885,7 +887,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              file_type=file_type,
          )

-     @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)
      async def import_objects(
          self,
          branch_name: str,
@@ -905,7 +907,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              object_type=RepositoryObjects.MENU,
          )

-     @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)
      async def get_check_definition(
          self,
          branch_name: str,
@@ -945,7 +947,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
              raise
          return checks

-     @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)
      async def get_python_transforms(
          self, branch_name: str, module: types.ModuleType, file_path: str, transform: InfrahubPythonTransformConfig
      ) -> list[TransformPythonInformation]:
@@ -1023,6 +1025,12 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
          if existing_generator.targets.id != generator.targets:
              existing_generator.targets = {"id": generator.targets, "source": str(self.id), "is_protected": True}

+         if existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change:
+             existing_generator.execute_in_proposed_change.value = generator.execute_in_proposed_change
+
+         if existing_generator.execute_after_merge.value != generator.execute_after_merge:
+             existing_generator.execute_after_merge.value = generator.execute_after_merge
+
          await existing_generator.save()

      async def create_python_check_definition(
@@ -1152,7 +1160,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
          await self.import_python_transforms(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]
          await self.import_generator_definitions(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]

-     @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)
      async def render_jinja2_template(self, commit: str, location: str, data: dict) -> str:
          log = get_run_logger()
          commit_worktree = self.get_commit_worktree(commit=commit)
@@ -1168,7 +1176,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                  repository_name=self.name, commit=commit, location=location, message=exc.message
              ) from exc

-     @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)
      async def execute_python_check(
          self,
          branch_name: str,
@@ -1227,7 +1235,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                  repository_name=self.name, class_name=class_name, commit=commit, location=location, message=str(exc)
              ) from exc

-     @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)  # type: ignore[arg-type]
+     @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)
      async def execute_python_transform(
          self,
          branch_name: str,
infrahub/graphql/analyzer.py CHANGED
@@ -639,7 +639,7 @@ class InfrahubGraphQLQueryAnalyzer(GraphQLQueryAnalyzer):
          self, node: InlineFragmentNode, query_node: GraphQLQueryNode
      ) -> GraphQLQueryNode:
          context_type = query_node.context_type
-         infrahub_model = self.schema_branch.get(name=node.type_condition.name.value)
+         infrahub_model = self.schema_branch.get(name=node.type_condition.name.value, duplicate=False)
          context_type = ContextType.DIRECT
          current_node = GraphQLQueryNode(
              parent=query_node,
infrahub/graphql/api/dependencies.py CHANGED
@@ -1,5 +1,3 @@
- from typing import Any
-
  from infrahub import config

  from ..app import InfrahubGraphQLApp
@@ -36,5 +34,5 @@ def build_graphql_query_permission_checker() -> GraphQLQueryPermissionChecker:
      )


- def build_graphql_app(**kwargs: Any) -> InfrahubGraphQLApp:
-     return InfrahubGraphQLApp(build_graphql_query_permission_checker(), **kwargs)
+ def build_graphql_app() -> InfrahubGraphQLApp:
+     return InfrahubGraphQLApp(build_graphql_query_permission_checker())
infrahub/graphql/api/endpoints.py CHANGED
@@ -9,7 +9,7 @@ from starlette.routing import Route, WebSocketRoute

  from infrahub.api.dependencies import get_branch_dep, get_current_user
  from infrahub.core import registry
- from infrahub.graphql.manager import GraphQLSchemaManager
+ from infrahub.graphql.registry import registry as graphql_registry

  from .dependencies import build_graphql_app

@@ -32,6 +32,6 @@ async def get_graphql_schema(
      branch: Branch = Depends(get_branch_dep), _: AccountSession = Depends(get_current_user)
  ) -> PlainTextResponse:
      schema_branch = registry.schema.get_schema_branch(name=branch.name)
-     gqlm = GraphQLSchemaManager.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
+     gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
      graphql_schema = gqlm.get_graphql_schema()
      return PlainTextResponse(content=print_schema(graphql_schema))