infrahub-server 1.5.0b1__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. infrahub/api/internal.py +2 -0
  2. infrahub/api/oauth2.py +13 -19
  3. infrahub/api/oidc.py +15 -21
  4. infrahub/api/schema.py +24 -3
  5. infrahub/artifacts/models.py +2 -1
  6. infrahub/auth.py +137 -3
  7. infrahub/cli/__init__.py +2 -0
  8. infrahub/cli/db.py +83 -102
  9. infrahub/cli/dev.py +118 -0
  10. infrahub/cli/tasks.py +46 -0
  11. infrahub/cli/upgrade.py +30 -3
  12. infrahub/computed_attribute/tasks.py +20 -8
  13. infrahub/core/attribute.py +10 -2
  14. infrahub/core/branch/enums.py +1 -1
  15. infrahub/core/branch/models.py +7 -3
  16. infrahub/core/branch/tasks.py +68 -7
  17. infrahub/core/constants/__init__.py +3 -0
  18. infrahub/core/diff/query/artifact.py +1 -0
  19. infrahub/core/diff/query/field_summary.py +1 -0
  20. infrahub/core/graph/__init__.py +1 -1
  21. infrahub/core/initialization.py +5 -2
  22. infrahub/core/migrations/__init__.py +3 -0
  23. infrahub/core/migrations/exceptions.py +4 -0
  24. infrahub/core/migrations/graph/__init__.py +10 -13
  25. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  26. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  27. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  28. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  29. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  30. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  31. infrahub/core/migrations/query/__init__.py +7 -8
  32. infrahub/core/migrations/query/attribute_add.py +8 -6
  33. infrahub/core/migrations/query/attribute_remove.py +134 -0
  34. infrahub/core/migrations/runner.py +54 -0
  35. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  36. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  37. infrahub/core/migrations/schema/node_attribute_add.py +30 -2
  38. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  39. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  40. infrahub/core/migrations/schema/node_remove.py +2 -1
  41. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  42. infrahub/core/migrations/shared.py +48 -14
  43. infrahub/core/node/__init__.py +16 -11
  44. infrahub/core/node/create.py +46 -63
  45. infrahub/core/node/lock_utils.py +70 -44
  46. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  47. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  48. infrahub/core/node/resource_manager/number_pool.py +2 -1
  49. infrahub/core/query/attribute.py +55 -0
  50. infrahub/core/query/ipam.py +1 -0
  51. infrahub/core/query/node.py +9 -3
  52. infrahub/core/query/relationship.py +1 -0
  53. infrahub/core/schema/__init__.py +56 -0
  54. infrahub/core/schema/attribute_schema.py +4 -0
  55. infrahub/core/schema/definitions/internal.py +2 -2
  56. infrahub/core/schema/generated/attribute_schema.py +2 -2
  57. infrahub/core/schema/manager.py +22 -1
  58. infrahub/core/schema/schema_branch.py +180 -22
  59. infrahub/database/graph.py +21 -0
  60. infrahub/display_labels/tasks.py +13 -7
  61. infrahub/events/branch_action.py +27 -1
  62. infrahub/generators/tasks.py +3 -7
  63. infrahub/git/base.py +4 -1
  64. infrahub/git/integrator.py +1 -1
  65. infrahub/git/models.py +2 -1
  66. infrahub/git/repository.py +22 -5
  67. infrahub/git/tasks.py +66 -10
  68. infrahub/git/utils.py +123 -1
  69. infrahub/graphql/api/endpoints.py +14 -4
  70. infrahub/graphql/manager.py +4 -9
  71. infrahub/graphql/mutations/convert_object_type.py +11 -1
  72. infrahub/graphql/mutations/display_label.py +17 -10
  73. infrahub/graphql/mutations/hfid.py +17 -10
  74. infrahub/graphql/mutations/ipam.py +54 -35
  75. infrahub/graphql/mutations/main.py +27 -28
  76. infrahub/graphql/schema_sort.py +170 -0
  77. infrahub/graphql/types/branch.py +4 -1
  78. infrahub/graphql/types/enums.py +3 -0
  79. infrahub/hfid/tasks.py +13 -7
  80. infrahub/lock.py +52 -12
  81. infrahub/message_bus/types.py +2 -1
  82. infrahub/permissions/constants.py +2 -0
  83. infrahub/proposed_change/tasks.py +25 -16
  84. infrahub/server.py +6 -2
  85. infrahub/services/__init__.py +2 -2
  86. infrahub/services/adapters/http/__init__.py +5 -0
  87. infrahub/services/adapters/workflow/worker.py +14 -3
  88. infrahub/task_manager/event.py +5 -0
  89. infrahub/task_manager/models.py +7 -0
  90. infrahub/task_manager/task.py +73 -0
  91. infrahub/trigger/setup.py +13 -4
  92. infrahub/trigger/tasks.py +3 -0
  93. infrahub/workers/dependencies.py +10 -1
  94. infrahub/workers/infrahub_async.py +10 -2
  95. infrahub/workflows/catalogue.py +8 -0
  96. infrahub/workflows/initialization.py +5 -0
  97. infrahub/workflows/utils.py +2 -1
  98. infrahub_sdk/client.py +13 -10
  99. infrahub_sdk/config.py +29 -2
  100. infrahub_sdk/ctl/schema.py +22 -7
  101. infrahub_sdk/schema/__init__.py +32 -4
  102. infrahub_sdk/spec/models.py +7 -0
  103. infrahub_sdk/spec/object.py +37 -102
  104. infrahub_sdk/spec/processors/__init__.py +0 -0
  105. infrahub_sdk/spec/processors/data_processor.py +10 -0
  106. infrahub_sdk/spec/processors/factory.py +34 -0
  107. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  108. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +3 -1
  109. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +115 -101
  110. infrahub_testcontainers/container.py +114 -2
  111. infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
  112. infrahub_testcontainers/docker-compose.test.yml +5 -0
  113. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  114. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +0 -97
  115. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +0 -86
  116. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  117. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  118. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub_sdk/config.py CHANGED
@@ -1,9 +1,10 @@
 from __future__ import annotations

+import ssl
 from copy import deepcopy
 from typing import Any

-from pydantic import Field, field_validator, model_validator
+from pydantic import Field, PrivateAttr, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self

@@ -78,6 +79,7 @@ class ConfigBase(BaseSettings):
         Can be useful to test with self-signed certificates.""",
     )
     tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
+    _ssl_context: ssl.SSLContext | None = PrivateAttr(default=None)

     @model_validator(mode="before")
     @classmethod
@@ -133,6 +135,28 @@ class ConfigBase(BaseSettings):
     def password_authentication(self) -> bool:
         return bool(self.username)

+    @property
+    def tls_context(self) -> ssl.SSLContext:
+        if self._ssl_context:
+            return self._ssl_context
+
+        if self.tls_insecure:
+            self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+            self._ssl_context.check_hostname = False
+            self._ssl_context.verify_mode = ssl.CERT_NONE
+            return self._ssl_context
+
+        if self.tls_ca_file:
+            self._ssl_context = ssl.create_default_context(cafile=self.tls_ca_file)
+
+        if self._ssl_context is None:
+            self._ssl_context = ssl.create_default_context()
+
+        return self._ssl_context
+
+    def set_ssl_context(self, context: ssl.SSLContext) -> None:
+        self._ssl_context = context
+

 class Config(ConfigBase):
     recorder: RecorderType = Field(default=RecorderType.NONE, description="Select builtin recorder for later replay.")
@@ -174,4 +198,7 @@ class Config(ConfigBase):
             if field not in covered_keys:
                 config[field] = deepcopy(getattr(self, field))

-        return Config(**config)
+        new_config = Config(**config)
+        if self._ssl_context:
+            new_config.set_ssl_context(self._ssl_context)
+        return new_config
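The new `tls_context` property builds an `ssl.SSLContext` once per `Config` instance and caches it: an unverified context when `tls_insecure` is set, a CA-pinned context when `tls_ca_file` is set, and the system default otherwise, with `set_ssl_context` as an escape hatch and `clone()` now carrying the cached context over. A minimal usage sketch (the httpx wiring, hostname, and CA path are illustrative, not the SDK's own code):

    import httpx

    from infrahub_sdk.config import Config

    # Hypothetical endpoint and CA bundle; tls_ca_file is an existing Config field.
    config = Config(address="https://infrahub.example.com", tls_ca_file="/etc/ssl/certs/internal-ca.pem")

    # httpx accepts an ssl.SSLContext for `verify`, so the cached context can be
    # reused across clients instead of being rebuilt per connection.
    client = httpx.Client(verify=config.tls_context)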
infrahub_sdk/ctl/schema.py CHANGED
@@ -14,6 +14,7 @@ from ..async_typer import AsyncTyper
 from ..ctl.client import initialize_client
 from ..ctl.utils import catch_exception, init_logging
 from ..queries import SCHEMA_HASH_SYNC_STATUS
+from ..schema import SchemaWarning
 from ..yaml import SchemaFile
 from .parameters import CONFIG_PARAM
 from .utils import load_yamlfile_from_disk_and_exit
@@ -152,6 +153,8 @@ async def load(

     console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")

+    _display_schema_warnings(console=console, warnings=response.warnings)
+
     if response.schema_updated and wait:
         waited = 0
         continue_waiting = True
@@ -187,12 +190,24 @@ async def check(

     success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)

-    if not success:
+    if not success or not response:
         display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
+        return
+
+    for schema_file in schemas_data:
+        console.print(f"[green] schema '{schema_file.location}' is Valid!")
+
+    warnings = response.pop("warnings", [])
+    schema_warnings = [SchemaWarning.model_validate(warning) for warning in warnings]
+    _display_schema_warnings(console=console, warnings=schema_warnings)
+    if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
+        print("No diff")
     else:
-        for schema_file in schemas_data:
-            console.print(f"[green] schema '{schema_file.location}' is Valid!")
-        if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
-            print("No diff")
-        else:
-            print(yaml.safe_dump(data=response, indent=4))
+        print(yaml.safe_dump(data=response, indent=4))
+
+
+def _display_schema_warnings(console: Console, warnings: list[SchemaWarning]) -> None:
+    for warning in warnings:
+        console.print(
+            f"[yellow] {warning.type.value}: {warning.message} [{', '.join([kind.display for kind in warning.kinds])}]"
+        )
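Both `infrahubctl schema load` and `infrahubctl schema check` now surface server-side warnings through the shared `_display_schema_warnings` helper. A sketch of the equivalent programmatic flow, assuming an already-initialized async client (`load_and_report` is a hypothetical helper, and the exact `schema.load` signature may differ):

    from infrahub_sdk import InfrahubClient


    async def load_and_report(client: InfrahubClient, schemas: list[dict]) -> None:
        # schema.load returns a SchemaLoadResponse, which now carries `warnings`
        # alongside `hash`, `previous_hash`, and `errors`.
        response = await client.schema.load(schemas=schemas)
        for warning in response.warnings:
            print(f"{warning.type.value}: {warning.message}")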
infrahub_sdk/schema/__init__.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import asyncio
+import inspect
 import json
 import warnings
 from collections.abc import MutableMapping
@@ -90,6 +91,26 @@ MainSchemaTypesAll: TypeAlias = Union[
 ]


+class SchemaWarningType(Enum):
+    DEPRECATION = "deprecation"
+
+
+class SchemaWarningKind(BaseModel):
+    kind: str = Field(..., description="The kind impacted by the warning")
+    field: str | None = Field(default=None, description="The attribute or relationship impacted by the warning")
+
+    @property
+    def display(self) -> str:
+        suffix = f".{self.field}" if self.field else ""
+        return f"{self.kind}{suffix}"
+
+
+class SchemaWarning(BaseModel):
+    type: SchemaWarningType = Field(..., description="The type of warning")
+    kinds: list[SchemaWarningKind] = Field(default_factory=list, description="The kinds impacted by the warning")
+    message: str = Field(..., description="The message that describes the warning")
+
+
 class InfrahubSchemaBase:
     client: InfrahubClient | InfrahubClientSync
     cache: dict[str, BranchSchema]
@@ -169,7 +190,9 @@ class InfrahubSchemaBase:
     def _validate_load_schema_response(response: httpx.Response) -> SchemaLoadResponse:
         if response.status_code == httpx.codes.OK:
             status = response.json()
-            return SchemaLoadResponse(hash=status["hash"], previous_hash=status["previous_hash"])
+            return SchemaLoadResponse(
+                hash=status["hash"], previous_hash=status["previous_hash"], warnings=status.get("warnings") or []
+            )

         if response.status_code in [
             httpx.codes.BAD_REQUEST,
@@ -185,12 +208,16 @@ class InfrahubSchemaBase:

     @staticmethod
     def _get_schema_name(schema: type[SchemaType | SchemaTypeSync] | str) -> str:
-        if hasattr(schema, "_is_runtime_protocol") and schema._is_runtime_protocol:  # type: ignore[union-attr]
-            return schema.__name__  # type: ignore[union-attr]
-
         if isinstance(schema, str):
             return schema

+        if hasattr(schema, "_is_runtime_protocol") and getattr(schema, "_is_runtime_protocol", None):
+            if inspect.iscoroutinefunction(schema.save):
+                return schema.__name__
+            if schema.__name__[-4:] == "Sync":
+                return schema.__name__[:-4]
+            return schema.__name__
+
         raise ValueError("schema must be a protocol or a string")

     @staticmethod
@@ -802,6 +829,7 @@ class SchemaLoadResponse(BaseModel):
     hash: str = Field(default="", description="The new hash for the entire schema")
     previous_hash: str = Field(default="", description="The previous hash for the entire schema")
     errors: dict = Field(default_factory=dict, description="Errors reported by the server")
+    warnings: list[SchemaWarning] = Field(default_factory=list, description="Warnings reported by the server")

     @property
     def schema_updated(self) -> bool:
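Since the warning types are plain pydantic models, their rendering is easy to preview. A sketch with a hypothetical deprecation payload (the kind, field, and message are invented for illustration):

    from infrahub_sdk.schema import SchemaWarning, SchemaWarningKind, SchemaWarningType

    warning = SchemaWarning(
        type=SchemaWarningType.DEPRECATION,
        kinds=[SchemaWarningKind(kind="InfraDevice", field="role")],
        message="attribute 'role' is deprecated",
    )

    # Mirrors the f-string used by infrahubctl's _display_schema_warnings helper.
    print(f"{warning.type.value}: {warning.message} [{', '.join(kind.display for kind in warning.kinds)}]")
    # -> deprecation: attribute 'role' is deprecated [InfraDevice.role]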
infrahub_sdk/spec/models.py ADDED
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class InfrahubObjectParameters(BaseModel):
+    expand_range: bool = False
infrahub_sdk/spec/object.py CHANGED
@@ -1,17 +1,15 @@
 from __future__ import annotations

-import copy
-import re
-from abc import ABC, abstractmethod
 from enum import Enum
-from typing import TYPE_CHECKING, Any, ClassVar
+from typing import TYPE_CHECKING, Any

 from pydantic import BaseModel, Field

 from ..exceptions import ObjectValidationError, ValidationError
 from ..schema import GenericSchemaAPI, RelationshipKind, RelationshipSchema
 from ..yaml import InfrahubFile, InfrahubFileKind
-from .range_expansion import MATCH_PATTERN, range_expansion
+from .models import InfrahubObjectParameters
+from .processors.factory import DataProcessorFactory

 if TYPE_CHECKING:
     from ..client import InfrahubClient
@@ -46,11 +44,6 @@ class RelationshipDataFormat(str, Enum):
     MANY_REF = "many_ref_list"


-class ObjectStrategy(str, Enum):
-    NORMAL = "normal"
-    RANGE_EXPAND = "range_expand"
-
-
 class RelationshipInfo(BaseModel):
     name: str
     rel_schema: RelationshipSchema
@@ -173,97 +166,21 @@ async def get_relationship_info(
     return info


-def expand_data_with_ranges(data: list[dict[str, Any]]) -> list[dict[str, Any]]:
-    """Expand any item in data with range pattern in any value. Supports multiple fields, requires equal expansion length."""
-    range_pattern = re.compile(MATCH_PATTERN)
-    expanded = []
-    for item in data:
-        # Find all fields to expand
-        expand_fields = {}
-        for key, value in item.items():
-            if isinstance(value, str) and range_pattern.search(value):
-                try:
-                    expand_fields[key] = range_expansion(value)
-                except Exception:
-                    # If expansion fails, treat as no expansion
-                    expand_fields[key] = [value]
-        if not expand_fields:
-            expanded.append(item)
-            continue
-        # Check all expanded lists have the same length
-        lengths = [len(v) for v in expand_fields.values()]
-        if len(set(lengths)) > 1:
-            raise ValidationError(f"Range expansion mismatch: fields expanded to different lengths: {lengths}")
-        n = lengths[0]
-        # Zip expanded values and produce new items
-        for i in range(n):
-            new_item = copy.deepcopy(item)
-            for key, values in expand_fields.items():
-                new_item[key] = values[i]
-            expanded.append(new_item)
-    return expanded
-
-
-class DataProcessor(ABC):
-    """Abstract base class for data processing strategies"""
-
-    @abstractmethod
-    def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
-        """Process the data according to the strategy"""
-
-
-class SingleDataProcessor(DataProcessor):
-    """Process data without any expansion"""
-
-    def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
-        return data
-
-
-class RangeExpandDataProcessor(DataProcessor):
-    """Process data with range expansion"""
-
-    def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
-        return expand_data_with_ranges(data)
-
-
-class DataProcessorFactory:
-    """Factory to create appropriate data processor based on strategy"""
-
-    _processors: ClassVar[dict[ObjectStrategy, type[DataProcessor]]] = {
-        ObjectStrategy.NORMAL: SingleDataProcessor,
-        ObjectStrategy.RANGE_EXPAND: RangeExpandDataProcessor,
-    }
-
-    @classmethod
-    def get_processor(cls, strategy: ObjectStrategy) -> DataProcessor:
-        processor_class = cls._processors.get(strategy)
-        if not processor_class:
-            raise ValueError(
-                f"Unknown strategy: {strategy} - no processor found. Valid strategies are: {list(cls._processors.keys())}"
-            )
-        return processor_class()
-
-    @classmethod
-    def register_processor(cls, strategy: ObjectStrategy, processor_class: type[DataProcessor]) -> None:
-        """Register a new processor for a strategy - useful for future extensions"""
-        cls._processors[strategy] = processor_class
-
-
 class InfrahubObjectFileData(BaseModel):
     kind: str
-    strategy: ObjectStrategy = ObjectStrategy.NORMAL
+    parameters: InfrahubObjectParameters = Field(default_factory=InfrahubObjectParameters)
     data: list[dict[str, Any]] = Field(default_factory=list)

-    def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+    async def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
         """Get data processed according to the strategy"""
-        processor = DataProcessorFactory.get_processor(self.strategy)
-        return processor.process_data(data)
+
+        return await DataProcessorFactory.process_data(kind=self.kind, parameters=self.parameters, data=data)

     async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]:
         errors: list[ObjectValidationError] = []
         schema = await client.schema.get(kind=self.kind, branch=branch)

-        processed_data = self._get_processed_data(data=self.data)
+        processed_data = await self._get_processed_data(data=self.data)
         self.data = processed_data

         for idx, item in enumerate(processed_data):
@@ -275,14 +192,14 @@ class InfrahubObjectFileData(BaseModel):
                     data=item,
                     branch=branch,
                     default_schema_kind=self.kind,
-                    strategy=self.strategy,  # Pass strategy down
+                    parameters=self.parameters,
                 )
             )
         return errors

     async def process(self, client: InfrahubClient, branch: str | None = None) -> None:
         schema = await client.schema.get(kind=self.kind, branch=branch)
-        processed_data = self._get_processed_data(data=self.data)
+        processed_data = await self._get_processed_data(data=self.data)

         for idx, item in enumerate(processed_data):
             await self.create_node(
@@ -304,8 +221,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
-        strategy: ObjectStrategy = ObjectStrategy.NORMAL,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
         errors: list[ObjectValidationError] = []
         context = context.copy() if context else {}

@@ -354,7 +272,7 @@ class InfrahubObjectFileData(BaseModel):
                         context=context,
                         branch=branch,
                         default_schema_kind=default_schema_kind,
-                        strategy=strategy,
+                        parameters=parameters,
                     )
                 )

@@ -370,8 +288,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
-        strategy: ObjectStrategy = ObjectStrategy.NORMAL,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
         context = context.copy() if context else {}
         errors: list[ObjectValidationError] = []

@@ -399,6 +318,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -412,11 +332,11 @@ class InfrahubObjectFileData(BaseModel):
             rel_info.find_matching_relationship(peer_schema=peer_schema)
             context.update(rel_info.get_context(value="placeholder"))

-        # Use strategy-aware data processing
-        processor = DataProcessorFactory.get_processor(strategy)
-        expanded_data = processor.process_data(data["data"])
+        processed_data = await DataProcessorFactory.process_data(
+            kind=peer_kind, data=data["data"], parameters=parameters
+        )

-        for idx, peer_data in enumerate(expanded_data):
+        for idx, peer_data in enumerate(processed_data):
             context["list_index"] = idx
             errors.extend(
                 await cls.validate_object(
@@ -427,7 +347,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
-                    strategy=strategy,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -452,6 +372,7 @@ class InfrahubObjectFileData(BaseModel):
                     context=context,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
             )
         return errors
@@ -478,7 +399,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> InfrahubNode:
+        parameters = parameters or InfrahubObjectParameters()
         context = context.copy() if context else {}

         errors = await cls.validate_object(
@@ -489,6 +412,7 @@ class InfrahubObjectFileData(BaseModel):
             context=context,
             branch=branch,
             default_schema_kind=default_schema_kind,
+            parameters=parameters,
         )
         if errors:
             messages = [str(error) for error in errors]
@@ -534,6 +458,7 @@ class InfrahubObjectFileData(BaseModel):
                     data=value,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 clean_data[key] = nodes[0]

@@ -545,6 +470,7 @@ class InfrahubObjectFileData(BaseModel):
                     data=value,
                     branch=branch,
                     default_schema_kind=default_schema_kind,
+                    parameters=parameters,
                 )
                 clean_data[key] = nodes

@@ -583,6 +509,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )

         return node
@@ -598,7 +525,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[InfrahubNode]:
+        parameters = parameters or InfrahubObjectParameters()
         nodes: list[InfrahubNode] = []
         context = context.copy() if context else {}

@@ -618,6 +547,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )
         return [new_node]

@@ -631,7 +561,10 @@ class InfrahubObjectFileData(BaseModel):
             rel_info.find_matching_relationship(peer_schema=peer_schema)
             context.update(rel_info.get_context(value=parent_node.id))

-        expanded_data = expand_data_with_ranges(data=data["data"])
+        expanded_data = await DataProcessorFactory.process_data(
+            kind=peer_kind, data=data["data"], parameters=parameters
+        )
+
         for idx, peer_data in enumerate(expanded_data):
             context["list_index"] = idx
             if isinstance(peer_data, dict):
@@ -643,6 +576,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )
             nodes.append(node)
         return nodes
@@ -668,6 +602,7 @@ class InfrahubObjectFileData(BaseModel):
                 context=context,
                 branch=branch,
                 default_schema_kind=default_schema_kind,
+                parameters=parameters,
             )
             nodes.append(node)

infrahub_sdk/spec/processors/__init__.py ADDED (empty file)
infrahub_sdk/spec/processors/data_processor.py ADDED
@@ -0,0 +1,10 @@
+from abc import ABC, abstractmethod
+from typing import Any
+
+
+class DataProcessor(ABC):
+    """Abstract base class for data processing strategies"""
+
+    @abstractmethod
+    async def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Process the data according to the strategy"""
infrahub_sdk/spec/processors/factory.py ADDED
@@ -0,0 +1,34 @@
+from collections.abc import Sequence
+from typing import Any
+
+from ..models import InfrahubObjectParameters
+from .data_processor import DataProcessor
+from .range_expand_processor import RangeExpandDataProcessor
+
+PROCESSOR_PER_KIND: dict[str, DataProcessor] = {}
+
+
+class DataProcessorFactory:
+    """Factory to create appropriate data processor based on strategy"""
+
+    @classmethod
+    def get_processors(cls, kind: str, parameters: InfrahubObjectParameters) -> Sequence[DataProcessor]:
+        processors: list[DataProcessor] = []
+        if parameters.expand_range:
+            processors.append(RangeExpandDataProcessor())
+        if kind in PROCESSOR_PER_KIND:
+            processors.append(PROCESSOR_PER_KIND[kind])
+
+        return processors
+
+    @classmethod
+    async def process_data(
+        cls,
+        kind: str,
+        data: list[dict[str, Any]],
+        parameters: InfrahubObjectParameters,
+    ) -> list[dict[str, Any]]:
+        processors = cls.get_processors(kind=kind, parameters=parameters)
+        for processor in processors:
+            data = await processor.process_data(data=data)
+        return data
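Processors now compose: `process_data` first runs the range expander when `expand_range` is set, then any processor registered for the kind in `PROCESSOR_PER_KIND`, which ships empty in this release. A sketch of a kind-specific processor, assuming the module-level registry is the intended extension point (the processor class and kind are invented):

    from typing import Any

    from infrahub_sdk.spec.processors.data_processor import DataProcessor
    from infrahub_sdk.spec.processors.factory import PROCESSOR_PER_KIND


    class UppercaseNameProcessor(DataProcessor):
        async def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
            # Uppercase the `name` field of every item before validation/creation.
            return [{**item, "name": item["name"].upper()} if "name" in item else item for item in data]


    PROCESSOR_PER_KIND["InfraDevice"] = UppercaseNameProcessor()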
infrahub_sdk/spec/processors/range_expand_processor.py ADDED
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import copy
+import logging
+import re
+from typing import Any
+
+from ...exceptions import ValidationError
+from ..range_expansion import MATCH_PATTERN, range_expansion
+from .data_processor import DataProcessor
+
+log = logging.getLogger("infrahub_sdk")
+
+
+class RangeExpandDataProcessor(DataProcessor):
+    """Process data with range expansion"""
+
+    @classmethod
+    async def process_data(
+        cls,
+        data: list[dict[str, Any]],
+    ) -> list[dict[str, Any]]:
+        """Expand any item in data with range pattern in any value. Supports multiple fields, requires equal expansion length."""
+        range_pattern = re.compile(MATCH_PATTERN)
+        expanded = []
+        for item in data:
+            # Find all fields to expand
+            expand_fields = {}
+            for key, value in item.items():
+                if isinstance(value, str) and range_pattern.search(value):
+                    try:
+                        expand_fields[key] = range_expansion(value)
+                    except (ValueError, TypeError, KeyError):
+                        # If expansion fails, treat as no expansion
+                        log.debug(
+                            f"Range expansion failed for value '{value}' in key '{key}'. Treating as no expansion."
+                        )
+                        expand_fields[key] = [value]
+            if not expand_fields:
+                expanded.append(item)
+                continue
+            # Check all expanded lists have the same length
+            lengths = [len(v) for v in expand_fields.values()]
+            if len(set(lengths)) > 1:
+                raise ValidationError(
+                    identifier="range_expansion",
+                    message=f"Range expansion mismatch: fields expanded to different lengths: {lengths}",
+                )
+            n = lengths[0]
+            # Zip expanded values and produce new items
+            for i in range(n):
+                new_item = copy.deepcopy(item)
+                for key, values in expand_fields.items():
+                    new_item[key] = values[i]
+                expanded.append(new_item)
+        return expanded
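Illustrative behaviour of the relocated expander, assuming `MATCH_PATTERN` matches bracketed numeric ranges such as `[1-3]` (the pattern and `range_expansion` live in `infrahub_sdk.spec.range_expansion`, which this release leaves untouched):

    import asyncio

    from infrahub_sdk.spec.processors.range_expand_processor import RangeExpandDataProcessor

    data = [{"name": "eth[1-3]", "description": "uplink [1-3]"}]

    # process_data is an async classmethod; both fields expand to the same
    # length, so the single item fans out to three items:
    # [{"name": "eth1", "description": "uplink 1"}, ..., {"name": "eth3", "description": "uplink 3"}]
    expanded = asyncio.run(RangeExpandDataProcessor.process_data(data=data))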
{infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: infrahub-server
-Version: 1.5.0b1
+Version: 1.5.0b2
 Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
 License: Apache-2.0
 Author: OpsMill
@@ -18,6 +18,8 @@ Requires-Dist: asgi-correlation-id (==4.2.0)
 Requires-Dist: authlib (==1.6.5)
 Requires-Dist: bcrypt (>=4.1,<4.2)
 Requires-Dist: boto3 (==1.34.129)
+Requires-Dist: cachetools-async (>=0.0.5,<0.0.6)
+Requires-Dist: click (==8.1.7)
 Requires-Dist: copier (>=9.8.0,<10.0.0)
 Requires-Dist: dulwich (>=0.22.7,<0.23.0)
 Requires-Dist: email-validator (>=2.1,<2.2)