infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (183)
  1. infrahub/actions/tasks.py +8 -0
  2. infrahub/api/diff/diff.py +1 -1
  3. infrahub/api/internal.py +2 -0
  4. infrahub/api/oauth2.py +13 -19
  5. infrahub/api/oidc.py +15 -21
  6. infrahub/api/schema.py +24 -3
  7. infrahub/artifacts/models.py +2 -1
  8. infrahub/auth.py +137 -3
  9. infrahub/cli/__init__.py +2 -0
  10. infrahub/cli/db.py +103 -98
  11. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  12. infrahub/cli/dev.py +118 -0
  13. infrahub/cli/tasks.py +46 -0
  14. infrahub/cli/upgrade.py +30 -3
  15. infrahub/computed_attribute/tasks.py +20 -8
  16. infrahub/core/attribute.py +13 -5
  17. infrahub/core/branch/enums.py +1 -1
  18. infrahub/core/branch/models.py +7 -3
  19. infrahub/core/branch/tasks.py +70 -8
  20. infrahub/core/changelog/models.py +4 -12
  21. infrahub/core/constants/__init__.py +3 -0
  22. infrahub/core/constants/infrahubkind.py +1 -0
  23. infrahub/core/diff/model/path.py +4 -0
  24. infrahub/core/diff/payload_builder.py +1 -1
  25. infrahub/core/diff/query/artifact.py +1 -0
  26. infrahub/core/diff/query/field_summary.py +1 -0
  27. infrahub/core/graph/__init__.py +1 -1
  28. infrahub/core/initialization.py +5 -2
  29. infrahub/core/ipam/utilization.py +1 -1
  30. infrahub/core/manager.py +6 -3
  31. infrahub/core/migrations/__init__.py +3 -0
  32. infrahub/core/migrations/exceptions.py +4 -0
  33. infrahub/core/migrations/graph/__init__.py +12 -11
  34. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  35. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  36. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  37. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  38. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  39. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  40. infrahub/core/migrations/query/__init__.py +7 -8
  41. infrahub/core/migrations/query/attribute_add.py +8 -6
  42. infrahub/core/migrations/query/attribute_remove.py +134 -0
  43. infrahub/core/migrations/runner.py +54 -0
  44. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  45. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  46. infrahub/core/migrations/schema/node_attribute_add.py +35 -4
  47. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  48. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  49. infrahub/core/migrations/schema/node_remove.py +2 -1
  50. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  51. infrahub/core/migrations/shared.py +52 -19
  52. infrahub/core/node/__init__.py +158 -51
  53. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  54. infrahub/core/node/create.py +46 -63
  55. infrahub/core/node/lock_utils.py +70 -44
  56. infrahub/core/node/node_property_attribute.py +230 -0
  57. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  58. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  59. infrahub/core/node/resource_manager/number_pool.py +2 -1
  60. infrahub/core/node/standard.py +1 -1
  61. infrahub/core/protocols.py +7 -1
  62. infrahub/core/query/attribute.py +55 -0
  63. infrahub/core/query/ipam.py +1 -0
  64. infrahub/core/query/node.py +23 -4
  65. infrahub/core/query/relationship.py +1 -0
  66. infrahub/core/registry.py +2 -2
  67. infrahub/core/relationship/constraints/count.py +1 -1
  68. infrahub/core/relationship/model.py +1 -1
  69. infrahub/core/schema/__init__.py +56 -0
  70. infrahub/core/schema/attribute_schema.py +4 -0
  71. infrahub/core/schema/basenode_schema.py +42 -2
  72. infrahub/core/schema/definitions/core/__init__.py +2 -0
  73. infrahub/core/schema/definitions/core/generator.py +2 -0
  74. infrahub/core/schema/definitions/core/group.py +16 -2
  75. infrahub/core/schema/definitions/internal.py +16 -3
  76. infrahub/core/schema/generated/attribute_schema.py +2 -2
  77. infrahub/core/schema/generated/base_node_schema.py +6 -1
  78. infrahub/core/schema/manager.py +22 -1
  79. infrahub/core/schema/node_schema.py +5 -2
  80. infrahub/core/schema/schema_branch.py +300 -8
  81. infrahub/core/schema/schema_branch_display.py +123 -0
  82. infrahub/core/schema/schema_branch_hfid.py +114 -0
  83. infrahub/core/validators/aggregated_checker.py +1 -1
  84. infrahub/core/validators/determiner.py +12 -1
  85. infrahub/core/validators/relationship/peer.py +1 -1
  86. infrahub/core/validators/tasks.py +1 -1
  87. infrahub/database/graph.py +21 -0
  88. infrahub/display_labels/__init__.py +0 -0
  89. infrahub/display_labels/gather.py +48 -0
  90. infrahub/display_labels/models.py +240 -0
  91. infrahub/display_labels/tasks.py +192 -0
  92. infrahub/display_labels/triggers.py +22 -0
  93. infrahub/events/branch_action.py +27 -1
  94. infrahub/events/group_action.py +1 -1
  95. infrahub/events/node_action.py +1 -1
  96. infrahub/generators/constants.py +7 -0
  97. infrahub/generators/models.py +7 -0
  98. infrahub/generators/tasks.py +34 -22
  99. infrahub/git/base.py +4 -1
  100. infrahub/git/integrator.py +23 -15
  101. infrahub/git/models.py +2 -1
  102. infrahub/git/repository.py +22 -5
  103. infrahub/git/tasks.py +66 -10
  104. infrahub/git/utils.py +123 -1
  105. infrahub/graphql/analyzer.py +1 -1
  106. infrahub/graphql/api/endpoints.py +14 -4
  107. infrahub/graphql/manager.py +4 -9
  108. infrahub/graphql/mutations/convert_object_type.py +11 -1
  109. infrahub/graphql/mutations/display_label.py +118 -0
  110. infrahub/graphql/mutations/generator.py +25 -7
  111. infrahub/graphql/mutations/hfid.py +125 -0
  112. infrahub/graphql/mutations/ipam.py +54 -35
  113. infrahub/graphql/mutations/main.py +27 -28
  114. infrahub/graphql/mutations/relationship.py +2 -2
  115. infrahub/graphql/mutations/resource_manager.py +2 -2
  116. infrahub/graphql/mutations/schema.py +5 -5
  117. infrahub/graphql/queries/resource_manager.py +1 -1
  118. infrahub/graphql/resolvers/resolver.py +2 -0
  119. infrahub/graphql/schema.py +4 -0
  120. infrahub/graphql/schema_sort.py +170 -0
  121. infrahub/graphql/types/branch.py +4 -1
  122. infrahub/graphql/types/enums.py +3 -0
  123. infrahub/groups/tasks.py +1 -1
  124. infrahub/hfid/__init__.py +0 -0
  125. infrahub/hfid/gather.py +48 -0
  126. infrahub/hfid/models.py +240 -0
  127. infrahub/hfid/tasks.py +191 -0
  128. infrahub/hfid/triggers.py +22 -0
  129. infrahub/lock.py +67 -16
  130. infrahub/message_bus/types.py +2 -1
  131. infrahub/middleware.py +26 -1
  132. infrahub/permissions/constants.py +2 -0
  133. infrahub/proposed_change/tasks.py +35 -17
  134. infrahub/server.py +21 -4
  135. infrahub/services/__init__.py +8 -5
  136. infrahub/services/adapters/http/__init__.py +5 -0
  137. infrahub/services/adapters/workflow/worker.py +14 -3
  138. infrahub/task_manager/event.py +5 -0
  139. infrahub/task_manager/models.py +7 -0
  140. infrahub/task_manager/task.py +73 -0
  141. infrahub/trigger/catalogue.py +4 -0
  142. infrahub/trigger/models.py +2 -0
  143. infrahub/trigger/setup.py +13 -4
  144. infrahub/trigger/tasks.py +6 -0
  145. infrahub/workers/dependencies.py +10 -1
  146. infrahub/workers/infrahub_async.py +10 -2
  147. infrahub/workflows/catalogue.py +80 -0
  148. infrahub/workflows/initialization.py +21 -0
  149. infrahub/workflows/utils.py +2 -1
  150. infrahub_sdk/checks.py +1 -1
  151. infrahub_sdk/client.py +13 -10
  152. infrahub_sdk/config.py +29 -2
  153. infrahub_sdk/ctl/cli_commands.py +2 -0
  154. infrahub_sdk/ctl/generator.py +4 -0
  155. infrahub_sdk/ctl/graphql.py +184 -0
  156. infrahub_sdk/ctl/schema.py +28 -9
  157. infrahub_sdk/generator.py +7 -1
  158. infrahub_sdk/graphql/__init__.py +12 -0
  159. infrahub_sdk/graphql/constants.py +1 -0
  160. infrahub_sdk/graphql/plugin.py +85 -0
  161. infrahub_sdk/graphql/query.py +77 -0
  162. infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
  163. infrahub_sdk/graphql/utils.py +40 -0
  164. infrahub_sdk/protocols.py +14 -0
  165. infrahub_sdk/schema/__init__.py +70 -4
  166. infrahub_sdk/schema/repository.py +8 -0
  167. infrahub_sdk/spec/models.py +7 -0
  168. infrahub_sdk/spec/object.py +53 -44
  169. infrahub_sdk/spec/processors/__init__.py +0 -0
  170. infrahub_sdk/spec/processors/data_processor.py +10 -0
  171. infrahub_sdk/spec/processors/factory.py +34 -0
  172. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  173. infrahub_sdk/spec/range_expansion.py +1 -1
  174. infrahub_sdk/transforms.py +1 -1
  175. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
  176. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
  177. infrahub_testcontainers/container.py +115 -3
  178. infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
  179. infrahub_testcontainers/docker-compose.test.yml +6 -1
  180. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  181. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  182. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  183. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub_sdk/{graphql.py → graphql/renderers.py} RENAMED
@@ -6,10 +6,35 @@ from typing import Any
 
 from pydantic import BaseModel
 
-VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!"))
+from .constants import VARIABLE_TYPE_MAPPING
 
 
 def convert_to_graphql_as_string(value: Any, convert_enum: bool = False) -> str:  # noqa: PLR0911
+    """Convert a Python value to its GraphQL string representation.
+
+    This function handles various Python types and converts them to their appropriate
+    GraphQL string format, including proper quoting, formatting, and special handling
+    for different data types.
+
+    Args:
+        value: The value to convert to GraphQL string format. Can be None, str, bool,
+            int, float, Enum, list, BaseModel, or any other type.
+        convert_enum: If True, converts Enum values to their underlying value instead
+            of their name. Defaults to False.
+
+    Returns:
+        str: The GraphQL string representation of the value.
+
+    Examples:
+        >>> convert_to_graphql_as_string("hello")
+        '"hello"'
+        >>> convert_to_graphql_as_string(True)
+        'true'
+        >>> convert_to_graphql_as_string([1, 2, 3])
+        '[1, 2, 3]'
+        >>> convert_to_graphql_as_string(None)
+        'null'
+    """
     if value is None:
         return "null"
     if isinstance(value, str) and value.startswith("$"):
@@ -56,6 +81,34 @@ def render_variables_to_string(data: dict[str, type[str | int | float | bool]])
 
 
 def render_query_block(data: dict, offset: int = 4, indentation: int = 4, convert_enum: bool = False) -> list[str]:
+    """Render a dictionary structure as a GraphQL query block with proper formatting.
+
+    This function recursively processes a dictionary to generate GraphQL query syntax
+    with proper indentation, handling of aliases, filters, and nested structures.
+    Special keys like "@filters" and "@alias" are processed for GraphQL-specific
+    formatting.
+
+    Args:
+        data: Dictionary representing the GraphQL query structure. Can contain
+            nested dictionaries, special keys like "@filters" and "@alias", and
+            various value types.
+        offset: Number of spaces to use for initial indentation. Defaults to 4.
+        indentation: Number of spaces to add for each nesting level. Defaults to 4.
+        convert_enum: If True, converts Enum values to their underlying value.
+            Defaults to False.
+
+    Returns:
+        list[str]: List of formatted lines representing the GraphQL query block.
+
+    Examples:
+        >>> data = {"user": {"name": None, "email": None}}
+        >>> render_query_block(data)
+        ['    user {', '        name', '        email', '    }']
+
+        >>> data = {"user": {"@alias": "u", "@filters": {"id": 123}, "name": None}}
+        >>> render_query_block(data)
+        ['    u: user(id: 123) {', '        name', '    }']
+    """
     FILTERS_KEY = "@filters"
     ALIAS_KEY = "@alias"
     KEYWORDS_TO_SKIP = [FILTERS_KEY, ALIAS_KEY]
@@ -97,6 +150,33 @@ def render_query_block(data: dict, offset: int = 4, indentation: int = 4, conver
 
 
 def render_input_block(data: dict, offset: int = 4, indentation: int = 4, convert_enum: bool = False) -> list[str]:
+    """Render a dictionary structure as a GraphQL input block with proper formatting.
+
+    This function recursively processes a dictionary to generate GraphQL input syntax
+    with proper indentation, handling nested objects, arrays, and various data types.
+    Unlike query blocks, input blocks don't handle special keys like "@filters" or
+    "@alias" and focus on data structure representation.
+
+    Args:
+        data: Dictionary representing the GraphQL input structure. Can contain
+            nested dictionaries, lists, and various value types.
+        offset: Number of spaces to use for initial indentation. Defaults to 4.
+        indentation: Number of spaces to add for each nesting level. Defaults to 4.
+        convert_enum: If True, converts Enum values to their underlying value.
+            Defaults to False.
+
+    Returns:
+        list[str]: List of formatted lines representing the GraphQL input block.
+
+    Examples:
+        >>> data = {"name": "John", "age": 30}
+        >>> render_input_block(data)
+        ['    name: "John"', '    age: 30']
+
+        >>> data = {"user": {"name": "John", "hobbies": ["reading", "coding"]}}
+        >>> render_input_block(data)
+        ['    user: {', '        name: "John"', '        hobbies: [', '            "reading",', '            "coding",', '        ]', '    }']
+    """
     offset_str = " " * offset
     lines = []
     for key, value in data.items():
@@ -130,75 +210,3 @@ def render_input_block(data: dict, offset: int = 4, indentation: int = 4, conver
         else:
             lines.append(f"{offset_str}{key}: {convert_to_graphql_as_string(value=value, convert_enum=convert_enum)}")
     return lines
-
-
-class BaseGraphQLQuery:
-    query_type: str = "not-defined"
-    indentation: int = 4
-
-    def __init__(self, query: dict, variables: dict | None = None, name: str | None = None):
-        self.query = query
-        self.variables = variables
-        self.name = name or ""
-
-    def render_first_line(self) -> str:
-        first_line = self.query_type
-
-        if self.name:
-            first_line += " " + self.name
-
-        if self.variables:
-            first_line += f" ({render_variables_to_string(self.variables)})"
-
-        first_line += " {"
-
-        return first_line
-
-
-class Query(BaseGraphQLQuery):
-    query_type = "query"
-
-    def render(self, convert_enum: bool = False) -> str:
-        lines = [self.render_first_line()]
-        lines.extend(
-            render_query_block(
-                data=self.query, indentation=self.indentation, offset=self.indentation, convert_enum=convert_enum
-            )
-        )
-        lines.append("}")
-
-        return "\n" + "\n".join(lines) + "\n"
-
-
-class Mutation(BaseGraphQLQuery):
-    query_type = "mutation"
-
-    def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any):
-        self.input_data = input_data
-        self.mutation = mutation
-        super().__init__(*args, **kwargs)
-
-    def render(self, convert_enum: bool = False) -> str:
-        lines = [self.render_first_line()]
-        lines.append(" " * self.indentation + f"{self.mutation}(")
-        lines.extend(
-            render_input_block(
-                data=self.input_data,
-                indentation=self.indentation,
-                offset=self.indentation * 2,
-                convert_enum=convert_enum,
-            )
-        )
-        lines.append(" " * self.indentation + "){")
-        lines.extend(
-            render_query_block(
-                data=self.query,
-                indentation=self.indentation,
-                offset=self.indentation * 2,
-                convert_enum=convert_enum,
-            )
-        )
-        lines.append(" " * self.indentation + "}")
-        lines.append("}")
-
-        return "\n" + "\n".join(lines) + "\n"
infrahub_sdk/graphql/utils.py ADDED
@@ -0,0 +1,40 @@
+import ast
+
+
+def get_class_def_index(module: ast.Module) -> int:
+    """Get the index of the first class definition in the module.
+    It's useful to insert other classes before the first class definition."""
+    for idx, item in enumerate(module.body):
+        if isinstance(item, ast.ClassDef):
+            return idx
+    return -1
+
+
+def insert_fragments_inline(module: ast.Module, fragment: ast.Module) -> ast.Module:
+    """Insert the Pydantic classes for the fragments inline into the module.
+
+    If no class definitions exist in module, fragments are appended to the end.
+    """
+    module_class_def_index = get_class_def_index(module)
+
+    fragment_classes: list[ast.ClassDef] = [item for item in fragment.body if isinstance(item, ast.ClassDef)]
+
+    # Handle edge case when no class definitions exist
+    if module_class_def_index == -1:
+        # Append fragments to the end of the module
+        module.body.extend(fragment_classes)
+    else:
+        # Insert fragments before the first class definition
+        for idx, item in enumerate(fragment_classes):
+            module.body.insert(module_class_def_index + idx, item)
+
+    return module
+
+
+def remove_fragment_import(module: ast.Module) -> ast.Module:
+    """Remove the fragment import from the module."""
+    for item in module.body:
+        if isinstance(item, ast.ImportFrom) and item.module == "fragments":
+            module.body.remove(item)
+            return module
+    return module
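These ast helpers appear to support the new GraphQL code-generation plugin shipped in this release (infrahub_sdk/graphql/plugin.py). A minimal sketch of how they compose, using made-up DeviceQuery/DeviceFragment modules and only calls visible in the diff:

    import ast
    from textwrap import dedent

    from infrahub_sdk.graphql.utils import insert_fragments_inline, remove_fragment_import

    # Hypothetical generated module that imports its fragment classes.
    generated = ast.parse(dedent("""
        from fragments import DeviceFragment

        class DeviceQuery:
            device: DeviceFragment
    """))

    # Hypothetical fragments module produced alongside it.
    fragments = ast.parse(dedent("""
        class DeviceFragment:
            name: str
    """))

    # Drop `from fragments import ...`, then splice the fragment classes in
    # ahead of the first class definition of the generated module.
    generated = remove_fragment_import(generated)
    generated = insert_fragments_inline(generated, fragments)

    print(ast.unparse(generated))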
infrahub_sdk/protocols.py CHANGED
@@ -131,6 +131,7 @@ class CoreGenericRepository(CoreNode):
     queries: RelationshipManager
     checks: RelationshipManager
     generators: RelationshipManager
+    groups_objects: RelationshipManager
 
 
 class CoreGroup(CoreNode):
@@ -355,6 +356,10 @@ class CoreGeneratorAction(CoreAction):
     generator: RelatedNode
 
 
+class CoreGeneratorAwareGroup(CoreGroup):
+    pass
+
+
 class CoreGeneratorCheck(CoreCheck):
     instance: String
 
@@ -366,6 +371,8 @@ class CoreGeneratorDefinition(CoreTaskTarget):
     file_path: String
     class_name: String
     convert_query_response: BooleanOptional
+    execute_in_proposed_change: BooleanOptional
+    execute_after_merge: BooleanOptional
     query: RelatedNode
     repository: RelatedNode
     targets: RelatedNode
@@ -681,6 +688,7 @@ class CoreGenericRepositorySync(CoreNodeSync):
     queries: RelationshipManagerSync
     checks: RelationshipManagerSync
     generators: RelationshipManagerSync
+    groups_objects: RelationshipManagerSync
 
 
 class CoreGroupSync(CoreNodeSync):
@@ -905,6 +913,10 @@ class CoreGeneratorActionSync(CoreActionSync):
     generator: RelatedNodeSync
 
 
+class CoreGeneratorAwareGroupSync(CoreGroupSync):
+    pass
+
+
 class CoreGeneratorCheckSync(CoreCheckSync):
     instance: String
 
@@ -916,6 +928,8 @@ class CoreGeneratorDefinitionSync(CoreTaskTargetSync):
     file_path: String
     class_name: String
     convert_query_response: BooleanOptional
+    execute_in_proposed_change: BooleanOptional
+    execute_after_merge: BooleanOptional
     query: RelatedNodeSync
     repository: RelatedNodeSync
     targets: RelatedNodeSync
infrahub_sdk/schema/__init__.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import asyncio
+import inspect
 import json
 import warnings
 from collections.abc import MutableMapping
@@ -90,6 +91,26 @@ MainSchemaTypesAll: TypeAlias = Union[
 ]
 
 
+class SchemaWarningType(Enum):
+    DEPRECATION = "deprecation"
+
+
+class SchemaWarningKind(BaseModel):
+    kind: str = Field(..., description="The kind impacted by the warning")
+    field: str | None = Field(default=None, description="The attribute or relationship impacted by the warning")
+
+    @property
+    def display(self) -> str:
+        suffix = f".{self.field}" if self.field else ""
+        return f"{self.kind}{suffix}"
+
+
+class SchemaWarning(BaseModel):
+    type: SchemaWarningType = Field(..., description="The type of warning")
+    kinds: list[SchemaWarningKind] = Field(default_factory=list, description="The kinds impacted by the warning")
+    message: str = Field(..., description="The message that describes the warning")
+
+
 class InfrahubSchemaBase:
     client: InfrahubClient | InfrahubClientSync
     cache: dict[str, BranchSchema]
@@ -169,7 +190,9 @@
     def _validate_load_schema_response(response: httpx.Response) -> SchemaLoadResponse:
         if response.status_code == httpx.codes.OK:
             status = response.json()
-            return SchemaLoadResponse(hash=status["hash"], previous_hash=status["previous_hash"])
+            return SchemaLoadResponse(
+                hash=status["hash"], previous_hash=status["previous_hash"], warnings=status.get("warnings") or []
+            )
 
         if response.status_code in [
             httpx.codes.BAD_REQUEST,
@@ -185,12 +208,16 @@
 
     @staticmethod
     def _get_schema_name(schema: type[SchemaType | SchemaTypeSync] | str) -> str:
-        if hasattr(schema, "_is_runtime_protocol") and schema._is_runtime_protocol:  # type: ignore[union-attr]
-            return schema.__name__  # type: ignore[union-attr]
-
         if isinstance(schema, str):
             return schema
 
+        if hasattr(schema, "_is_runtime_protocol") and getattr(schema, "_is_runtime_protocol", None):
+            if inspect.iscoroutinefunction(schema.save):
+                return schema.__name__
+            if schema.__name__[-4:] == "Sync":
+                return schema.__name__[:-4]
+            return schema.__name__
+
         raise ValueError("schema must be a protocol or a string")
 
     @staticmethod
@@ -474,6 +501,25 @@
 
         return branch_schema.nodes
 
+    async def get_graphql_schema(self, branch: str | None = None) -> str:
+        """Get the GraphQL schema as a string.
+
+        Args:
+            branch: The branch to get the schema for. Defaults to default_branch.
+
+        Returns:
+            The GraphQL schema as a string.
+        """
+        branch = branch or self.client.default_branch
+        url = f"{self.client.address}/schema.graphql?branch={branch}"
+
+        response = await self.client._get(url=url)
+
+        if response.status_code != 200:
+            raise ValueError(f"Failed to fetch GraphQL schema: HTTP {response.status_code} - {response.text}")
+
+        return response.text
+
     async def _fetch(self, branch: str, namespaces: list[str] | None = None) -> BranchSchema:
         url_parts = [("branch", branch)]
         if namespaces:
@@ -697,6 +743,25 @@
 
         return branch_schema.nodes
 
+    def get_graphql_schema(self, branch: str | None = None) -> str:
+        """Get the GraphQL schema as a string.
+
+        Args:
+            branch: The branch to get the schema for. Defaults to default_branch.
+
+        Returns:
+            The GraphQL schema as a string.
+        """
+        branch = branch or self.client.default_branch
+        url = f"{self.client.address}/schema.graphql?branch={branch}"
+
+        response = self.client._get(url=url)
+
+        if response.status_code != 200:
+            raise ValueError(f"Failed to fetch GraphQL schema: HTTP {response.status_code} - {response.text}")
+
+        return response.text
+
     def _fetch(self, branch: str, namespaces: list[str] | None = None) -> BranchSchema:
         url_parts = [("branch", branch)]
         if namespaces:
@@ -764,6 +829,7 @@ class SchemaLoadResponse(BaseModel):
     hash: str = Field(default="", description="The new hash for the entire schema")
     previous_hash: str = Field(default="", description="The previous hash for the entire schema")
     errors: dict = Field(default_factory=dict, description="Errors reported by the server")
+    warnings: list[SchemaWarning] = Field(default_factory=list, description="Warnings reported by the server")
 
     @property
    def schema_updated(self) -> bool:
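The schema API now exposes the rendered GraphQL SDL directly and surfaces server-reported warnings next to errors on load. A hedged sketch against a local instance; the address, branch, and schema dict are placeholders, and the schema.load() call assumes its existing signature:

    import asyncio

    from infrahub_sdk import InfrahubClient

    async def inspect_schema() -> None:
        client = InfrahubClient(address="http://localhost:8000")

        # New in this release: fetch the rendered GraphQL SDL for a branch.
        sdl = await client.schema.get_graphql_schema(branch="main")
        print(sdl[:200])

        # SchemaLoadResponse now carries warnings (e.g. deprecations) alongside errors.
        response = await client.schema.load(schemas=[{"version": "1.0", "nodes": []}], branch="main")
        for warning in response.warnings:
            print(warning.type.value, [kind.display for kind in warning.kinds], warning.message)

    asyncio.run(inspect_schema())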
infrahub_sdk/schema/repository.py CHANGED
@@ -96,6 +96,14 @@ class InfrahubGeneratorDefinitionConfig(InfrahubRepositoryConfigElement):
         default=False,
         description="Decide if the generator should convert the result of the GraphQL query to SDK InfrahubNode objects.",
     )
+    execute_in_proposed_change: bool = Field(
+        default=True,
+        description="Decide if the generator should execute in a proposed change.",
+    )
+    execute_after_merge: bool = Field(
+        default=True,
+        description="Decide if the generator should execute after a merge.",
+    )
 
     def load_class(self, import_root: str | None = None, relative_path: str | None = None) -> type[InfrahubGenerator]:
         module = import_module(module_path=self.file_path, import_root=import_root, relative_path=relative_path)
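The two new flags let a repository control when a generator definition runs. A hedged sketch of one entry as it would sit under generator_definitions in an .infrahub.yml repository config, shown as the equivalent Python dict; every value other than the two new flags is illustrative:

    generator_definition = {
        "name": "upstream_interfaces",           # hypothetical generator name
        "file_path": "generators/upstream.py",   # hypothetical path in the repository
        "class_name": "UpstreamInterfaces",
        "query": "upstream_interfaces_query",
        "targets": "upstream_devices",
        "convert_query_response": False,
        # New in 1.5.0b2: both default to True, preserving the previous behaviour.
        "execute_in_proposed_change": True,   # run as part of proposed-change pipelines
        "execute_after_merge": False,         # skip the post-merge run
    }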
infrahub_sdk/spec/models.py ADDED
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class InfrahubObjectParameters(BaseModel):
+    expand_range: bool = False
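Range expansion on object files is now routed through this parameters model and the new processor factory; expand_range defaults to False, so it reads as opt-in. A hedged sketch of an object file spec that uses it; the kind and attribute names are illustrative:

    spec = {
        "kind": "InfraInterface",               # hypothetical node kind
        "parameters": {"expand_range": True},   # maps to InfrahubObjectParameters
        "data": [
            # "Ethernet[1-4]" would expand into four objects when expand_range is enabled.
            {"name": "Ethernet[1-4]", "enabled": True},
        ],
    }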
infrahub_sdk/spec/object.py CHANGED
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import copy
-import re
 from enum import Enum
 from typing import TYPE_CHECKING, Any
 
@@ -10,7 +8,8 @@ from pydantic import BaseModel, Field
 from ..exceptions import ObjectValidationError, ValidationError
 from ..schema import GenericSchemaAPI, RelationshipKind, RelationshipSchema
 from ..yaml import InfrahubFile, InfrahubFileKind
-from .range_expansion import MATCH_PATTERN, range_expansion
+from .models import InfrahubObjectParameters
+from .processors.factory import DataProcessorFactory
 
 if TYPE_CHECKING:
     from ..client import InfrahubClient
@@ -167,47 +166,24 @@ async def get_relationship_info(
     return info
 
 
-def expand_data_with_ranges(data: list[dict[str, Any]]) -> list[dict[str, Any]]:
-    """Expand any item in self.data with range pattern in any value. Supports multiple fields, requires equal expansion length."""
-    range_pattern = re.compile(MATCH_PATTERN)
-    expanded = []
-    for item in data:
-        # Find all fields to expand
-        expand_fields = {}
-        for key, value in item.items():
-            if isinstance(value, str) and range_pattern.search(value):
-                try:
-                    expand_fields[key] = range_expansion(value)
-                except Exception:
-                    # If expansion fails, treat as no expansion
-                    expand_fields[key] = [value]
-        if not expand_fields:
-            expanded.append(item)
-            continue
-        # Check all expanded lists have the same length
-        lengths = [len(v) for v in expand_fields.values()]
-        if len(set(lengths)) > 1:
-            raise ValidationError(f"Range expansion mismatch: fields expanded to different lengths: {lengths}")
-        n = lengths[0]
-        # Zip expanded values and produce new items
-        for i in range(n):
-            new_item = copy.deepcopy(item)
-            for key, values in expand_fields.items():
-                new_item[key] = values[i]
-            expanded.append(new_item)
-    return expanded
-
-
 class InfrahubObjectFileData(BaseModel):
     kind: str
+    parameters: InfrahubObjectParameters = Field(default_factory=InfrahubObjectParameters)
     data: list[dict[str, Any]] = Field(default_factory=list)
 
+    async def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Get data processed according to the strategy"""
+
+        return await DataProcessorFactory.process_data(kind=self.kind, parameters=self.parameters, data=data)
+
     async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]:
         errors: list[ObjectValidationError] = []
         schema = await client.schema.get(kind=self.kind, branch=branch)
-        expanded_data = expand_data_with_ranges(self.data)
-        self.data = expanded_data
-        for idx, item in enumerate(expanded_data):
+
+        processed_data = await self._get_processed_data(data=self.data)
+        self.data = processed_data
+
+        for idx, item in enumerate(processed_data):
             errors.extend(
                 await self.validate_object(
                     client=client,
@@ -216,14 +192,16 @@ class InfrahubObjectFileData(BaseModel):
                     data=item,
                     branch=branch,
                     default_schema_kind=self.kind,
+                    parameters=self.parameters,
                 )
             )
         return errors
 
     async def process(self, client: InfrahubClient, branch: str | None = None) -> None:
         schema = await client.schema.get(kind=self.kind, branch=branch)
-        expanded_data = expand_data_with_ranges(self.data)
-        for idx, item in enumerate(expanded_data):
+        processed_data = await self._get_processed_data(data=self.data)
+
+        for idx, item in enumerate(processed_data):
             await self.create_node(
                 client=client,
                 schema=schema,
@@ -243,7 +221,9 @@
         context: dict | None = None,
        branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
         errors: list[ObjectValidationError] = []
         context = context.copy() if context else {}
 
@@ -292,6 +272,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
 
@@ -307,7 +288,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
         context = context.copy() if context else {}
         errors: list[ObjectValidationError] = []
 
@@ -335,6 +318,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -348,8 +332,11 @@ class InfrahubObjectFileData(BaseModel):
         rel_info.find_matching_relationship(peer_schema=peer_schema)
         context.update(rel_info.get_context(value="placeholder"))
 
-        expanded_data = expand_data_with_ranges(data=data["data"])
-        for idx, peer_data in enumerate(expanded_data):
+        processed_data = await DataProcessorFactory.process_data(
+            kind=peer_kind, data=data["data"], parameters=parameters
+        )
+
+        for idx, peer_data in enumerate(processed_data):
             context["list_index"] = idx
             errors.extend(
                 await cls.validate_object(
@@ -360,6 +347,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -384,6 +372,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -410,7 +399,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> InfrahubNode:
+        parameters = parameters or InfrahubObjectParameters()
         context = context.copy() if context else {}
 
         errors = await cls.validate_object(
@@ -421,6 +412,7 @@ class InfrahubObjectFileData(BaseModel):
             context=context,
             branch=branch,
             default_schema_kind=default_schema_kind,
+            parameters=parameters,
         )
         if errors:
             messages = [str(error) for error in errors]
@@ -466,6 +458,7 @@ class InfrahubObjectFileData(BaseModel):
                     data=value,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 clean_data[key] = nodes[0]
 
@@ -477,6 +470,7 @@ class InfrahubObjectFileData(BaseModel):
                     data=value,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 clean_data[key] = nodes
 
@@ -515,6 +509,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )
 
         return node
@@ -530,7 +525,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[InfrahubNode]:
+        parameters = parameters or InfrahubObjectParameters()
         nodes: list[InfrahubNode] = []
         context = context.copy() if context else {}
 
@@ -550,6 +547,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )
             return [new_node]
 
@@ -563,7 +561,10 @@ class InfrahubObjectFileData(BaseModel):
         rel_info.find_matching_relationship(peer_schema=peer_schema)
         context.update(rel_info.get_context(value=parent_node.id))
 
-        expanded_data = expand_data_with_ranges(data=data["data"])
+        expanded_data = await DataProcessorFactory.process_data(
+            kind=peer_kind, data=data["data"], parameters=parameters
+        )
+
         for idx, peer_data in enumerate(expanded_data):
             context["list_index"] = idx
             if isinstance(peer_data, dict):
@@ -575,6 +576,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 nodes.append(node)
         return nodes
@@ -600,6 +602,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 nodes.append(node)
 
@@ -633,14 +636,20 @@ class ObjectFile(InfrahubFile):
     @property
     def spec(self) -> InfrahubObjectFileData:
         if not self._spec:
-            self._spec = InfrahubObjectFileData(**self.data.spec)
+            try:
+                self._spec = InfrahubObjectFileData(**self.data.spec)
+            except Exception as exc:
+                raise ValidationError(identifier=str(self.location), message=str(exc))
         return self._spec
 
     def validate_content(self) -> None:
         super().validate_content()
         if self.kind != InfrahubFileKind.OBJECT:
             raise ValueError("File is not an Infrahub Object file")
-        self._spec = InfrahubObjectFileData(**self.data.spec)
+        try:
+            self._spec = InfrahubObjectFileData(**self.data.spec)
+        except Exception as exc:
+            raise ValidationError(identifier=str(self.location), message=str(exc))
 
     async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> None:
         self.validate_content()