infrahub-server 1.4.13__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff shows the changes between two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +27 -3
- infrahub/auth.py +5 -5
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +160 -157
- infrahub/cli/dev.py +118 -0
- infrahub/cli/upgrade.py +56 -9
- infrahub/computed_attribute/tasks.py +19 -7
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +35 -24
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +9 -5
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +72 -10
- infrahub/core/changelog/models.py +2 -10
- infrahub/core/constants/__init__.py +4 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +7 -4
- infrahub/core/manager.py +3 -81
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +11 -10
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +26 -5
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +66 -19
- infrahub/core/models.py +2 -2
- infrahub/core/node/__init__.py +207 -54
- infrahub/core/node/create.py +53 -49
- infrahub/core/node/lock_utils.py +124 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +82 -15
- infrahub/core/query/ipam.py +16 -4
- infrahub/core/query/node.py +66 -188
- infrahub/core/query/relationship.py +44 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/relationship/model.py +69 -24
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/check.py +1 -1
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/core/transform.py +1 -1
- infrahub/core/schema/definitions/internal.py +12 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/node_schema.py +1 -0
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +295 -10
- infrahub/core/schema/schema_branch_display.py +135 -0
- infrahub/core/schema/schema_branch_hfid.py +120 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +38 -1
- infrahub/git/integrator.py +22 -14
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +16 -6
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +213 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +16 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +44 -13
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +73 -41
- infrahub/graphql/mutations/main.py +61 -178
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +8 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +119 -42
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/patch/plan_writer.py +2 -2
- infrahub/permissions/constants.py +2 -0
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +98 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +24 -5
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +9 -1
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/workflow/worker.py +5 -2
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workers/dependencies.py +3 -1
- infrahub/workers/infrahub_async.py +5 -1
- infrahub/workflows/catalogue.py +118 -3
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/models.py +17 -2
- infrahub_sdk/branch.py +17 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +376 -95
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/branch.py +3 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +20 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +40 -10
- infrahub_sdk/ctl/task.py +110 -0
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/node/relationship.py +1 -3
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +54 -6
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/task/models.py +6 -4
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
- {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +221 -165
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub_sdk/spec/object.py
CHANGED
@@ -8,6 +8,8 @@ from pydantic import BaseModel, Field
 from ..exceptions import ObjectValidationError, ValidationError
 from ..schema import GenericSchemaAPI, RelationshipKind, RelationshipSchema
 from ..yaml import InfrahubFile, InfrahubFileKind
+from .models import InfrahubObjectParameters
+from .processors.factory import DataProcessorFactory
 
 if TYPE_CHECKING:
     from ..client import InfrahubClient
@@ -166,12 +168,22 @@ async def get_relationship_info(
 
 class InfrahubObjectFileData(BaseModel):
     kind: str
+    parameters: InfrahubObjectParameters = Field(default_factory=InfrahubObjectParameters)
     data: list[dict[str, Any]] = Field(default_factory=list)
 
+    async def _get_processed_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Get data processed according to the strategy"""
+
+        return await DataProcessorFactory.process_data(kind=self.kind, parameters=self.parameters, data=data)
+
     async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> list[ObjectValidationError]:
         errors: list[ObjectValidationError] = []
         schema = await client.schema.get(kind=self.kind, branch=branch)
-        for idx, item in enumerate(self.data):
+
+        processed_data = await self._get_processed_data(data=self.data)
+        self.data = processed_data
+
+        for idx, item in enumerate(processed_data):
             errors.extend(
                 await self.validate_object(
                     client=client,
@@ -180,13 +192,16 @@ class InfrahubObjectFileData(BaseModel):
                     data=item,
                     branch=branch,
                     default_schema_kind=self.kind,
+                    parameters=self.parameters,
                 )
             )
         return errors
 
     async def process(self, client: InfrahubClient, branch: str | None = None) -> None:
         schema = await client.schema.get(kind=self.kind, branch=branch)
-        for idx, item in enumerate(self.data):
+        processed_data = await self._get_processed_data(data=self.data)
+
+        for idx, item in enumerate(processed_data):
             await self.create_node(
                 client=client,
                 schema=schema,
@@ -206,7 +221,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
        errors: list[ObjectValidationError] = []
        context = context.copy() if context else {}
 
@@ -255,6 +272,7 @@ class InfrahubObjectFileData(BaseModel):
                    context=context,
                    branch=branch,
                    default_schema_kind=default_schema_kind,
+                   parameters=parameters,
                )
            )
 
@@ -270,7 +288,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[ObjectValidationError]:
+        parameters = parameters or InfrahubObjectParameters()
        context = context.copy() if context else {}
        errors: list[ObjectValidationError] = []
 
@@ -298,6 +318,7 @@ class InfrahubObjectFileData(BaseModel):
                        context=context,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                )
            return errors
@@ -311,7 +332,11 @@ class InfrahubObjectFileData(BaseModel):
            rel_info.find_matching_relationship(peer_schema=peer_schema)
            context.update(rel_info.get_context(value="placeholder"))
 
-           for idx, peer_data in enumerate(data["data"]):
+           processed_data = await DataProcessorFactory.process_data(
+               kind=peer_kind, data=data["data"], parameters=parameters
+           )
+
+           for idx, peer_data in enumerate(processed_data):
                context["list_index"] = idx
                errors.extend(
                    await cls.validate_object(
@@ -322,6 +347,7 @@ class InfrahubObjectFileData(BaseModel):
                        context=context,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                )
        return errors
@@ -346,6 +372,7 @@ class InfrahubObjectFileData(BaseModel):
                        context=context,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                )
        return errors
@@ -372,7 +399,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> InfrahubNode:
+        parameters = parameters or InfrahubObjectParameters()
        context = context.copy() if context else {}
 
        errors = await cls.validate_object(
@@ -383,6 +412,7 @@ class InfrahubObjectFileData(BaseModel):
            context=context,
            branch=branch,
            default_schema_kind=default_schema_kind,
+           parameters=parameters,
        )
        if errors:
            messages = [str(error) for error in errors]
@@ -428,6 +458,7 @@ class InfrahubObjectFileData(BaseModel):
                        data=value,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                    clean_data[key] = nodes[0]
 
@@ -439,6 +470,7 @@ class InfrahubObjectFileData(BaseModel):
                        data=value,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                    clean_data[key] = nodes
 
@@ -477,6 +509,7 @@ class InfrahubObjectFileData(BaseModel):
            context=context,
            branch=branch,
            default_schema_kind=default_schema_kind,
+           parameters=parameters,
        )
 
        return node
@@ -492,7 +525,9 @@ class InfrahubObjectFileData(BaseModel):
         context: dict | None = None,
         branch: str | None = None,
         default_schema_kind: str | None = None,
+        parameters: InfrahubObjectParameters | None = None,
     ) -> list[InfrahubNode]:
+        parameters = parameters or InfrahubObjectParameters()
        nodes: list[InfrahubNode] = []
        context = context.copy() if context else {}
 
@@ -512,6 +547,7 @@ class InfrahubObjectFileData(BaseModel):
                context=context,
                branch=branch,
                default_schema_kind=default_schema_kind,
+               parameters=parameters,
            )
            return [new_node]
 
@@ -525,7 +561,11 @@ class InfrahubObjectFileData(BaseModel):
            rel_info.find_matching_relationship(peer_schema=peer_schema)
            context.update(rel_info.get_context(value=parent_node.id))
 
-           for idx, peer_data in enumerate(data["data"]):
+           expanded_data = await DataProcessorFactory.process_data(
+               kind=peer_kind, data=data["data"], parameters=parameters
+           )
+
+           for idx, peer_data in enumerate(expanded_data):
                context["list_index"] = idx
                if isinstance(peer_data, dict):
                    node = await cls.create_node(
@@ -536,6 +576,7 @@ class InfrahubObjectFileData(BaseModel):
                        context=context,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                    nodes.append(node)
        return nodes
@@ -561,6 +602,7 @@ class InfrahubObjectFileData(BaseModel):
                        context=context,
                        branch=branch,
                        default_schema_kind=default_schema_kind,
+                       parameters=parameters,
                    )
                    nodes.append(node)
 
@@ -594,14 +636,20 @@ class ObjectFile(InfrahubFile):
    @property
    def spec(self) -> InfrahubObjectFileData:
        if not self._spec:
-           self._spec = InfrahubObjectFileData(**self.data.spec)
+           try:
+               self._spec = InfrahubObjectFileData(**self.data.spec)
+           except Exception as exc:
+               raise ValidationError(identifier=str(self.location), message=str(exc))
        return self._spec
 
    def validate_content(self) -> None:
        super().validate_content()
        if self.kind != InfrahubFileKind.OBJECT:
            raise ValueError("File is not an Infrahub Object file")
-       self._spec = InfrahubObjectFileData(**self.data.spec)
+       try:
+           self._spec = InfrahubObjectFileData(**self.data.spec)
+       except Exception as exc:
+           raise ValidationError(identifier=str(self.location), message=str(exc))
 
    async def validate_format(self, client: InfrahubClient, branch: str | None = None) -> None:
        self.validate_content()
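The `parameters` field and the `_get_processed_data()` hook added above mean an object file's data is run through the processor pipeline before validation and node creation. A minimal sketch of exercising that path directly, assuming `InfrahubObjectParameters` exposes the `expand_range` flag that the factory below reads; the kind, address, and attribute names are placeholders, and running it requires a reachable Infrahub instance:

    import asyncio

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.spec.models import InfrahubObjectParameters
    from infrahub_sdk.spec.object import InfrahubObjectFileData


    async def load_objects() -> None:
        client = InfrahubClient(address="http://localhost:8000")  # placeholder address
        spec = InfrahubObjectFileData(
            kind="InfraInterface",  # placeholder kind
            parameters=InfrahubObjectParameters(expand_range=True),
            data=[{"name": "Ethernet[1-4]", "enabled": True}],  # placeholder attributes
        )
        errors = await spec.validate_format(client=client)  # data is expanded here and written back to spec.data
        if not errors:
            await spec.process(client=client)  # one node per expanded item


    asyncio.run(load_objects())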
infrahub_sdk/spec/processors/data_processor.py
ADDED
@@ -0,0 +1,10 @@
+from abc import ABC, abstractmethod
+from typing import Any
+
+
+class DataProcessor(ABC):
+    """Abstract base class for data processing strategies"""
+
+    @abstractmethod
+    async def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
+        """Process the data according to the strategy"""
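`DataProcessor` only requires the `process_data` coroutine, so a custom processor is a small class. A hypothetical subclass for illustration only, not part of the package:

    from typing import Any

    from infrahub_sdk.spec.processors.data_processor import DataProcessor


    class LowercaseNameProcessor(DataProcessor):
        """Hypothetical processor that lowercases every "name" value."""

        async def process_data(self, data: list[dict[str, Any]]) -> list[dict[str, Any]]:
            # Return new dicts instead of mutating the caller's items.
            return [
                {**item, "name": item["name"].lower()} if isinstance(item.get("name"), str) else item
                for item in data
            ]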
infrahub_sdk/spec/processors/factory.py
ADDED
@@ -0,0 +1,34 @@
+from collections.abc import Sequence
+from typing import Any
+
+from ..models import InfrahubObjectParameters
+from .data_processor import DataProcessor
+from .range_expand_processor import RangeExpandDataProcessor
+
+PROCESSOR_PER_KIND: dict[str, DataProcessor] = {}
+
+
+class DataProcessorFactory:
+    """Factory to create appropriate data processor based on strategy"""
+
+    @classmethod
+    def get_processors(cls, kind: str, parameters: InfrahubObjectParameters) -> Sequence[DataProcessor]:
+        processors: list[DataProcessor] = []
+        if parameters.expand_range:
+            processors.append(RangeExpandDataProcessor())
+        if kind in PROCESSOR_PER_KIND:
+            processors.append(PROCESSOR_PER_KIND[kind])
+
+        return processors
+
+    @classmethod
+    async def process_data(
+        cls,
+        kind: str,
+        data: list[dict[str, Any]],
+        parameters: InfrahubObjectParameters,
+    ) -> list[dict[str, Any]]:
+        processors = cls.get_processors(kind=kind, parameters=parameters)
+        for processor in processors:
+            data = await processor.process_data(data=data)
+        return data
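`get_processors` builds the chain per call: the range expander is included whenever `parameters.expand_range` is truthy, and any processor registered in `PROCESSOR_PER_KIND` is appended for that kind. A minimal sketch of calling the factory directly, assuming `expand_range` is a plain field on `InfrahubObjectParameters` (which its use here suggests); the kind string is a placeholder with no kind-specific processor registered:

    import asyncio

    from infrahub_sdk.spec.models import InfrahubObjectParameters
    from infrahub_sdk.spec.processors.factory import DataProcessorFactory


    async def main() -> None:
        result = await DataProcessorFactory.process_data(
            kind="InfraDevice",  # placeholder kind, nothing registered in PROCESSOR_PER_KIND for it
            data=[{"name": "leaf-[1-3]", "site": "sfo1"}],
            parameters=InfrahubObjectParameters(expand_range=True),
        )
        print(result)
        # [{'name': 'leaf-1', 'site': 'sfo1'}, {'name': 'leaf-2', 'site': 'sfo1'}, {'name': 'leaf-3', 'site': 'sfo1'}]


    asyncio.run(main())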
infrahub_sdk/spec/processors/range_expand_processor.py
ADDED
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+import copy
+import logging
+import re
+from typing import Any
+
+from ...exceptions import ValidationError
+from ..range_expansion import MATCH_PATTERN, range_expansion
+from .data_processor import DataProcessor
+
+log = logging.getLogger("infrahub_sdk")
+
+
+class RangeExpandDataProcessor(DataProcessor):
+    """Process data with range expansion"""
+
+    @classmethod
+    async def process_data(
+        cls,
+        data: list[dict[str, Any]],
+    ) -> list[dict[str, Any]]:
+        """Expand any item in data with range pattern in any value. Supports multiple fields, requires equal expansion length."""
+        range_pattern = re.compile(MATCH_PATTERN)
+        expanded = []
+        for item in data:
+            # Find all fields to expand
+            expand_fields = {}
+            for key, value in item.items():
+                if isinstance(value, str) and range_pattern.search(value):
+                    try:
+                        expand_fields[key] = range_expansion(value)
+                    except (ValueError, TypeError, KeyError):
+                        # If expansion fails, treat as no expansion
+                        log.debug(
+                            f"Range expansion failed for value '{value}' in key '{key}'. Treating as no expansion."
+                        )
+                        expand_fields[key] = [value]
+            if not expand_fields:
+                expanded.append(item)
+                continue
+            # Check all expanded lists have the same length
+            lengths = [len(v) for v in expand_fields.values()]
+            if len(set(lengths)) > 1:
+                raise ValidationError(
+                    identifier="range_expansion",
+                    message=f"Range expansion mismatch: fields expanded to different lengths: {lengths}",
+                )
+            n = lengths[0]
+            # Zip expanded values and produce new items
+            for i in range(n):
+                new_item = copy.deepcopy(item)
+                for key, values in expand_fields.items():
+                    new_item[key] = values[i]
+                expanded.append(new_item)
+        return expanded
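When several values in one item contain range patterns, each field must expand to the same number of elements and the expanded lists are zipped positionally rather than combined as a cartesian product; a length mismatch raises `ValidationError`. A short standalone sketch of both outcomes, calling the processor directly:

    import asyncio

    from infrahub_sdk.exceptions import ValidationError
    from infrahub_sdk.spec.processors.range_expand_processor import RangeExpandDataProcessor


    async def main() -> None:
        processor = RangeExpandDataProcessor()

        # Two fields that both expand to two values are zipped pairwise.
        ok = await processor.process_data(data=[{"name": "eth[1-2]", "description": "uplink [a-b]"}])
        print(ok)
        # [{'name': 'eth1', 'description': 'uplink a'}, {'name': 'eth2', 'description': 'uplink b'}]

        # A 2-value field combined with a 3-value field is rejected.
        try:
            await processor.process_data(data=[{"name": "eth[1-2]", "vlan": "[10-12]"}])
        except ValidationError as exc:
            print(exc)


    asyncio.run(main())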
infrahub_sdk/spec/range_expansion.py
ADDED
@@ -0,0 +1,118 @@
+import itertools
+import re
+
+MATCH_PATTERN = r"(\[[\w,-]*[-,][\w,-]*\])"
+
+
+def _escape_brackets(s: str) -> str:
+    return s.replace("\\[", "__LBRACK__").replace("\\]", "__RBRACK__")
+
+
+def _unescape_brackets(s: str) -> str:
+    return s.replace("__LBRACK__", "[").replace("__RBRACK__", "]")
+
+
+def _char_range_expand(char_range_str: str) -> list[str]:
+    """Expands a string of numbers or single-character letters."""
+    expanded_values: list[str] = []
+    # Special case: if no dash and no comma, and multiple characters, error if not all alphanumeric
+    if "," not in char_range_str and "-" not in char_range_str and len(char_range_str) > 1:
+        if not char_range_str.isalnum():
+            raise ValueError(f"Invalid non-alphanumeric range: [{char_range_str}]")
+        return list(char_range_str)
+
+    for value in char_range_str.split(","):
+        if not value:
+            # Malformed: empty part in comma-separated list
+            return [f"[{char_range_str}]"]
+        if "-" in value:
+            start_char, end_char = value.split("-", 1)
+            if not start_char or not end_char:
+                expanded_values.append(f"[{char_range_str}]")
+                return expanded_values
+            # Check if it's a numeric range
+            if start_char.isdigit() and end_char.isdigit():
+                start_num = int(start_char)
+                end_num = int(end_char)
+                step = 1 if start_num <= end_num else -1
+                expanded_values.extend(str(i) for i in range(start_num, end_num + step, step))
+            # Check if it's an alphabetical range (single character)
+            elif len(start_char) == 1 and len(end_char) == 1 and start_char.isalpha() and end_char.isalpha():
+                start_ord = ord(start_char)
+                end_ord = ord(end_char)
+                step = 1 if start_ord <= end_ord else -1
+                is_upper = start_char.isupper()
+                for i in range(start_ord, end_ord + step, step):
+                    char = chr(i)
+                    expanded_values.append(char.upper() if is_upper else char)
+            else:
+                # Mixed or unsupported range type, append as-is
+                expanded_values.append(value)
+        else:
+            # If the value is a single character or valid alphanumeric string, append
+            if not value.isalnum():
+                raise ValueError(f"Invalid non-alphanumeric value: [{value}]")
+            expanded_values.append(value)
+    return expanded_values
+
+
+def _extract_constants(pattern: str, re_compiled: re.Pattern) -> tuple[list[int], list[list[str]]]:
+    cartesian_list = []
+    interface_constant = [0]
+    for match in re_compiled.finditer(pattern):
+        interface_constant.append(match.start())
+        interface_constant.append(match.end())
+        cartesian_list.append(_char_range_expand(match.group()[1:-1]))
+    return interface_constant, cartesian_list
+
+
+def _expand_interfaces(pattern: str, interface_constant: list[int], cartesian_list: list[list[str]]) -> list[str]:
+    def _pairwise(lst: list[int]) -> list[tuple[int, int]]:
+        it = iter(lst)
+        return list(zip(it, it))
+
+    if interface_constant[-1] < len(pattern):
+        interface_constant.append(len(pattern))
+    interface_constant_out = _pairwise(interface_constant)
+    expanded_interfaces = []
+    for element in itertools.product(*cartesian_list):
+        current_interface = ""
+        for count, item in enumerate(interface_constant_out):
+            current_interface += pattern[item[0] : item[1]]
+            if count < len(element):
+                current_interface += element[count]
+        expanded_interfaces.append(_unescape_brackets(current_interface))
+    return expanded_interfaces
+
+
+def range_expansion(interface_pattern: str) -> list[str]:
+    """Expand string pattern into a list of strings, supporting both
+    number and single-character alphabet ranges. Heavily inspired by
+    Netutils interface_range_expansion but adapted to support letters.
+
+    Args:
+        interface_pattern: The string pattern that will be parsed to create the list of interfaces.
+
+    Returns:
+        Contains the expanded list of interfaces.
+
+    Examples:
+        >>> from infrahub_sdk.spec.range_expansion import range_expansion
+        >>> range_expansion("Device [A-C]")
+        ['Device A', 'Device B', 'Device C']
+        >>> range_expansion("FastEthernet[1-2]/0/[10-15]")
+        ['FastEthernet1/0/10', 'FastEthernet1/0/11', 'FastEthernet1/0/12',
+        'FastEthernet1/0/13', 'FastEthernet1/0/14', 'FastEthernet1/0/15',
+        'FastEthernet2/0/10', 'FastEthernet2/0/11', 'FastEthernet2/0/12',
+        'FastEthernet2/0/13', 'FastEthernet2/0/14', 'FastEthernet2/0/15']
+        >>> range_expansion("GigabitEthernet[a-c]/0/1")
+        ['GigabitEtherneta/0/1', 'GigabitEthernetb/0/1', 'GigabitEthernetc/0/1']
+        >>> range_expansion("Eth[a,c,e]/0/1")
+        ['Etha/0/1', 'Ethc/0/1', 'Ethe/0/1']
+    """
+    pattern_escaped = _escape_brackets(interface_pattern)
+    re_compiled = re.compile(MATCH_PATTERN)
+    if not re_compiled.search(pattern_escaped):
+        return [_unescape_brackets(pattern_escaped)]
+    interface_constant, cartesian_list = _extract_constants(pattern_escaped, re_compiled)
+    return _expand_interfaces(pattern_escaped, interface_constant, cartesian_list)
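One behaviour worth noting from `_escape_brackets`/`_unescape_brackets`: brackets escaped with a backslash are swapped for placeholder tokens before matching and restored afterwards, so an escaped pattern comes back as a single literal string. Based on a reading of the helpers above:

    from infrahub_sdk.spec.range_expansion import range_expansion

    print(range_expansion("no ranges here"))    # ['no ranges here']
    print(range_expansion(r"Eth\[1-2\]/0/1"))   # ['Eth[1-2]/0/1']  (escaped brackets kept literal)
    print(range_expansion("Eth[1-2]/0/1"))      # ['Eth1/0/1', 'Eth2/0/1']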
infrahub_sdk/task/models.py
CHANGED
@@ -49,12 +49,14 @@ class Task(BaseModel)
         related_nodes: list[TaskRelatedNode] = []
         logs: list[TaskLog] = []
 
-        if
-
+        if "related_nodes" in data:
+            if data.get("related_nodes"):
+                related_nodes = [TaskRelatedNode(**item) for item in data["related_nodes"]]
             del data["related_nodes"]
 
-        if
-
+        if "logs" in data:
+            if data.get("logs"):
+                logs = [TaskLog(**item["node"]) for item in data["logs"]["edges"]]
             del data["logs"]
 
         return cls(**data, related_nodes=related_nodes, logs=logs)
infrahub_sdk/timestamp.py
CHANGED
@@ -3,14 +3,22 @@ from __future__ import annotations
 import re
 import warnings
 from datetime import datetime, timezone
-from typing import Literal
+from typing import Literal, TypedDict
 
+from typing_extensions import NotRequired
 from whenever import Date, Instant, LocalDateTime, OffsetDateTime, Time, ZonedDateTime
 
 from .exceptions import TimestampFormatError
 
 UTC = timezone.utc  # Required for older versions of Python
 
+
+class SubstractParams(TypedDict):
+    seconds: NotRequired[float]
+    minutes: NotRequired[float]
+    hours: NotRequired[float]
+
+
 REGEX_MAPPING = {
     "seconds": r"(\d+)(s|sec|second|seconds)",
     "minutes": r"(\d+)(m|min|minute|minutes)",
@@ -43,8 +51,7 @@ class Timestamp:
     @classmethod
     def _parse_string(cls, value: str) -> ZonedDateTime:
         try:
-            zoned_date = ZonedDateTime.parse_common_iso(value)
-            return zoned_date
+            return ZonedDateTime.parse_common_iso(value)
         except ValueError:
             pass
 
@@ -73,14 +80,19 @@ class Timestamp:
         except ValueError:
             pass
 
-        params:
+        params: SubstractParams = {}
         for key, regex in REGEX_MAPPING.items():
             match = re.search(regex, value)
             if match:
-
+                if key == "seconds":
+                    params["seconds"] = float(match.group(1))
+                elif key == "minutes":
+                    params["minutes"] = float(match.group(1))
+                elif key == "hours":
+                    params["hours"] = float(match.group(1))
 
         if params:
-            return ZonedDateTime.now("UTC").subtract(**params)
+            return ZonedDateTime.now("UTC").subtract(**params)
 
         raise TimestampFormatError(f"Invalid time format for {value}")
 
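The relative formats matched by `REGEX_MAPPING` ("30s", "5m", "2h", and their longer spellings) are now collected into the `SubstractParams` TypedDict so the keyword arguments handed to `ZonedDateTime.subtract()` are statically typed. Assuming these strings are accepted directly by the `Timestamp` constructor, which is what this parsing chain suggests, usage would look like:

    from infrahub_sdk.timestamp import Timestamp

    five_minutes_ago = Timestamp("5m")     # matches REGEX_MAPPING["minutes"], then ZonedDateTime.now("UTC").subtract(minutes=5.0)
    two_hours_ago = Timestamp("2h")        # "hours" pattern
    thirty_seconds_ago = Timestamp("30s")  # "seconds" pattern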
{infrahub_server-1.4.13.dist-info → infrahub_server-1.5.0.dist-info}/METADATA
CHANGED
@@ -1,25 +1,25 @@
 Metadata-Version: 2.3
 Name: infrahub-server
-Version: 1.4.13
+Version: 1.5.0
 Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
 License: Apache-2.0
 Author: OpsMill
 Author-email: info@opsmill.com
-Requires-Python: >=3.
+Requires-Python: >=3.12,<3.13
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Requires-Dist: Jinja2 (>=3,<4)
 Requires-Dist: aio-pika (>=9.4,<9.5)
 Requires-Dist: aiodataloader (==0.4.0)
+Requires-Dist: ariadne-codegen (==0.15.3)
 Requires-Dist: asgi-correlation-id (==4.2.0)
 Requires-Dist: authlib (==1.6.5)
 Requires-Dist: bcrypt (>=4.1,<4.2)
 Requires-Dist: boto3 (==1.34.129)
 Requires-Dist: cachetools-async (>=0.0.5,<0.0.6)
+Requires-Dist: click (==8.1.7)
 Requires-Dist: copier (>=9.8.0,<10.0.0)
 Requires-Dist: dulwich (>=0.22.7,<0.23.0)
 Requires-Dist: email-validator (>=2.1,<2.2)
@@ -35,15 +35,14 @@ Requires-Dist: neo4j (>=5.28,<5.29)
 Requires-Dist: neo4j-rust-ext (>=5.28,<5.29)
 Requires-Dist: netaddr (==1.3.0)
 Requires-Dist: netutils (==1.12.0)
-Requires-Dist: numpy (>=1.
-Requires-Dist: numpy (>=1.26.2,<2.0.0) ; python_version >= "3.12"
+Requires-Dist: numpy (>=1.26.2,<2.0.0)
 Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (==1.28.1)
 Requires-Dist: opentelemetry-exporter-otlp-proto-http (==1.28.1)
 Requires-Dist: opentelemetry-instrumentation-aio-pika (==0.49b1)
 Requires-Dist: opentelemetry-instrumentation-fastapi (==0.49b1)
-Requires-Dist: prefect (==3.4.
-Requires-Dist: prefect-redis (==0.2.
-Requires-Dist: pyarrow (>=14
+Requires-Dist: prefect (==3.4.23)
+Requires-Dist: prefect-redis (==0.2.5)
+Requires-Dist: pyarrow (>=14)
 Requires-Dist: pydantic (>=2.10,<2.11)
 Requires-Dist: pydantic-settings (>=2.8,<2.9)
 Requires-Dist: pyjwt (>=2.8,<2.9)
@@ -54,7 +53,7 @@ Requires-Dist: redis[hiredis] (>=6.0.0,<7.0.0)
 Requires-Dist: rich (>=13,<14)
 Requires-Dist: starlette-exporter (>=0.23,<0.24)
 Requires-Dist: structlog (==24.1.0)
-Requires-Dist:
+Requires-Dist: tomli (>=1.1.0) ; python_version < "3.11"
 Requires-Dist: typer (==0.12.5)
 Requires-Dist: ujson (>=5,<6)
 Requires-Dist: uvicorn[standard] (>=0.32,<0.33)