infrahub-server 1.2.0b1__py3-none-any.whl → 1.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/dependencies.py +6 -6
- infrahub/api/diff/validation_models.py +7 -7
- infrahub/api/schema.py +1 -1
- infrahub/artifacts/models.py +1 -3
- infrahub/artifacts/tasks.py +1 -3
- infrahub/cli/__init__.py +13 -9
- infrahub/cli/constants.py +3 -0
- infrahub/cli/db.py +165 -183
- infrahub/cli/upgrade.py +146 -0
- infrahub/computed_attribute/gather.py +185 -0
- infrahub/computed_attribute/models.py +239 -11
- infrahub/computed_attribute/tasks.py +77 -442
- infrahub/computed_attribute/triggers.py +11 -45
- infrahub/config.py +43 -32
- infrahub/context.py +14 -0
- infrahub/core/account.py +4 -4
- infrahub/core/attribute.py +57 -57
- infrahub/core/branch/tasks.py +12 -9
- infrahub/core/changelog/diff.py +16 -8
- infrahub/core/changelog/models.py +189 -26
- infrahub/core/constants/__init__.py +5 -1
- infrahub/core/constants/infrahubkind.py +2 -0
- infrahub/core/constraint/node/runner.py +9 -8
- infrahub/core/diff/branch_differ.py +10 -10
- infrahub/core/diff/ipam_diff_parser.py +4 -5
- infrahub/core/diff/model/diff.py +27 -27
- infrahub/core/diff/model/path.py +3 -3
- infrahub/core/diff/query/merge.py +20 -17
- infrahub/core/diff/query_parser.py +4 -4
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +1 -10
- infrahub/core/ipam/constants.py +3 -4
- infrahub/core/ipam/reconciler.py +12 -12
- infrahub/core/ipam/utilization.py +10 -13
- infrahub/core/manager.py +34 -34
- infrahub/core/merge.py +7 -7
- infrahub/core/migrations/__init__.py +2 -3
- infrahub/core/migrations/graph/__init__.py +9 -4
- infrahub/core/migrations/graph/m017_add_core_profile.py +1 -5
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +4 -4
- infrahub/core/migrations/graph/m020_duplicate_edges.py +160 -0
- infrahub/core/migrations/graph/m021_missing_hierarchy_merge.py +51 -0
- infrahub/core/migrations/graph/{m020_add_generate_template_attr.py → m022_add_generate_template_attr.py} +3 -3
- infrahub/core/migrations/graph/m023_deduplicate_cardinality_one_relationships.py +96 -0
- infrahub/core/migrations/query/attribute_add.py +2 -2
- infrahub/core/migrations/query/node_duplicate.py +18 -21
- infrahub/core/migrations/query/schema_attribute_update.py +2 -2
- infrahub/core/migrations/schema/models.py +19 -4
- infrahub/core/migrations/schema/tasks.py +2 -2
- infrahub/core/migrations/shared.py +16 -16
- infrahub/core/models.py +15 -6
- infrahub/core/node/__init__.py +29 -28
- infrahub/core/node/base.py +2 -4
- infrahub/core/node/constraints/attribute_uniqueness.py +2 -2
- infrahub/core/node/constraints/grouped_uniqueness.py +99 -47
- infrahub/core/node/constraints/interface.py +1 -2
- infrahub/core/node/delete_validator.py +3 -5
- infrahub/core/node/ipam.py +4 -4
- infrahub/core/node/permissions.py +7 -7
- infrahub/core/node/resource_manager/ip_address_pool.py +6 -6
- infrahub/core/node/resource_manager/ip_prefix_pool.py +6 -6
- infrahub/core/node/resource_manager/number_pool.py +3 -3
- infrahub/core/path.py +12 -12
- infrahub/core/property.py +11 -11
- infrahub/core/protocols.py +5 -0
- infrahub/core/protocols_base.py +21 -21
- infrahub/core/query/__init__.py +33 -33
- infrahub/core/query/attribute.py +6 -4
- infrahub/core/query/diff.py +3 -3
- infrahub/core/query/node.py +82 -32
- infrahub/core/query/relationship.py +24 -24
- infrahub/core/query/resource_manager.py +2 -0
- infrahub/core/query/standard_node.py +3 -3
- infrahub/core/query/subquery.py +9 -9
- infrahub/core/registry.py +13 -15
- infrahub/core/relationship/constraints/count.py +3 -4
- infrahub/core/relationship/constraints/peer_kind.py +3 -4
- infrahub/core/relationship/constraints/profiles_kind.py +2 -2
- infrahub/core/relationship/model.py +40 -46
- infrahub/core/schema/attribute_schema.py +9 -9
- infrahub/core/schema/basenode_schema.py +93 -44
- infrahub/core/schema/computed_attribute.py +3 -3
- infrahub/core/schema/definitions/core/__init__.py +13 -19
- infrahub/core/schema/definitions/core/account.py +151 -148
- infrahub/core/schema/definitions/core/artifact.py +122 -113
- infrahub/core/schema/definitions/core/builtin.py +19 -16
- infrahub/core/schema/definitions/core/check.py +61 -53
- infrahub/core/schema/definitions/core/core.py +17 -0
- infrahub/core/schema/definitions/core/generator.py +89 -85
- infrahub/core/schema/definitions/core/graphql_query.py +72 -70
- infrahub/core/schema/definitions/core/group.py +96 -93
- infrahub/core/schema/definitions/core/ipam.py +176 -235
- infrahub/core/schema/definitions/core/lineage.py +18 -16
- infrahub/core/schema/definitions/core/menu.py +42 -40
- infrahub/core/schema/definitions/core/permission.py +144 -142
- infrahub/core/schema/definitions/core/profile.py +16 -27
- infrahub/core/schema/definitions/core/propose_change.py +88 -79
- infrahub/core/schema/definitions/core/propose_change_comment.py +170 -165
- infrahub/core/schema/definitions/core/propose_change_validator.py +290 -288
- infrahub/core/schema/definitions/core/repository.py +231 -225
- infrahub/core/schema/definitions/core/resource_pool.py +156 -166
- infrahub/core/schema/definitions/core/template.py +27 -12
- infrahub/core/schema/definitions/core/transform.py +85 -76
- infrahub/core/schema/definitions/core/webhook.py +127 -101
- infrahub/core/schema/definitions/internal.py +16 -16
- infrahub/core/schema/dropdown.py +3 -4
- infrahub/core/schema/generated/attribute_schema.py +15 -18
- infrahub/core/schema/generated/base_node_schema.py +12 -14
- infrahub/core/schema/generated/node_schema.py +3 -5
- infrahub/core/schema/generated/relationship_schema.py +9 -11
- infrahub/core/schema/generic_schema.py +2 -2
- infrahub/core/schema/manager.py +20 -9
- infrahub/core/schema/node_schema.py +4 -2
- infrahub/core/schema/relationship_schema.py +7 -7
- infrahub/core/schema/schema_branch.py +276 -138
- infrahub/core/schema/schema_branch_computed.py +41 -4
- infrahub/core/task/task.py +3 -3
- infrahub/core/task/user_task.py +15 -15
- infrahub/core/utils.py +20 -18
- infrahub/core/validators/__init__.py +1 -3
- infrahub/core/validators/aggregated_checker.py +2 -2
- infrahub/core/validators/attribute/choices.py +2 -2
- infrahub/core/validators/attribute/enum.py +2 -2
- infrahub/core/validators/attribute/kind.py +2 -2
- infrahub/core/validators/attribute/length.py +2 -2
- infrahub/core/validators/attribute/optional.py +2 -2
- infrahub/core/validators/attribute/regex.py +2 -2
- infrahub/core/validators/attribute/unique.py +2 -2
- infrahub/core/validators/checks_runner.py +25 -2
- infrahub/core/validators/determiner.py +1 -3
- infrahub/core/validators/interface.py +6 -2
- infrahub/core/validators/model.py +22 -3
- infrahub/core/validators/models/validate_migration.py +17 -4
- infrahub/core/validators/node/attribute.py +2 -2
- infrahub/core/validators/node/generate_profile.py +2 -2
- infrahub/core/validators/node/hierarchy.py +3 -5
- infrahub/core/validators/node/inherit_from.py +27 -5
- infrahub/core/validators/node/relationship.py +2 -2
- infrahub/core/validators/relationship/count.py +4 -4
- infrahub/core/validators/relationship/optional.py +2 -2
- infrahub/core/validators/relationship/peer.py +2 -2
- infrahub/core/validators/shared.py +2 -2
- infrahub/core/validators/tasks.py +8 -0
- infrahub/core/validators/uniqueness/checker.py +22 -21
- infrahub/core/validators/uniqueness/index.py +2 -2
- infrahub/core/validators/uniqueness/model.py +11 -11
- infrahub/database/__init__.py +26 -22
- infrahub/database/metrics.py +7 -1
- infrahub/dependencies/builder/constraint/grouped/node_runner.py +1 -3
- infrahub/dependencies/component/registry.py +2 -2
- infrahub/events/__init__.py +25 -2
- infrahub/events/artifact_action.py +13 -25
- infrahub/events/branch_action.py +26 -18
- infrahub/events/generator.py +71 -0
- infrahub/events/group_action.py +10 -24
- infrahub/events/models.py +10 -16
- infrahub/events/node_action.py +87 -32
- infrahub/events/repository_action.py +5 -18
- infrahub/events/schema_action.py +4 -9
- infrahub/events/utils.py +16 -0
- infrahub/events/validator_action.py +55 -0
- infrahub/exceptions.py +23 -24
- infrahub/generators/models.py +1 -3
- infrahub/git/base.py +7 -7
- infrahub/git/integrator.py +26 -25
- infrahub/git/models.py +22 -9
- infrahub/git/repository.py +3 -3
- infrahub/git/tasks.py +67 -49
- infrahub/git/utils.py +48 -0
- infrahub/git/worktree.py +1 -2
- infrahub/git_credential/askpass.py +1 -2
- infrahub/graphql/analyzer.py +12 -0
- infrahub/graphql/app.py +13 -15
- infrahub/graphql/context.py +6 -0
- infrahub/graphql/initialization.py +3 -0
- infrahub/graphql/loaders/node.py +2 -12
- infrahub/graphql/loaders/peers.py +77 -0
- infrahub/graphql/loaders/shared.py +13 -0
- infrahub/graphql/manager.py +13 -10
- infrahub/graphql/mutations/artifact_definition.py +5 -5
- infrahub/graphql/mutations/computed_attribute.py +4 -5
- infrahub/graphql/mutations/graphql_query.py +5 -5
- infrahub/graphql/mutations/ipam.py +50 -70
- infrahub/graphql/mutations/main.py +164 -141
- infrahub/graphql/mutations/menu.py +5 -5
- infrahub/graphql/mutations/models.py +2 -4
- infrahub/graphql/mutations/node_getter/by_default_filter.py +10 -10
- infrahub/graphql/mutations/node_getter/by_hfid.py +1 -3
- infrahub/graphql/mutations/node_getter/by_id.py +1 -3
- infrahub/graphql/mutations/node_getter/interface.py +1 -2
- infrahub/graphql/mutations/proposed_change.py +7 -7
- infrahub/graphql/mutations/relationship.py +67 -35
- infrahub/graphql/mutations/repository.py +8 -8
- infrahub/graphql/mutations/resource_manager.py +3 -3
- infrahub/graphql/mutations/schema.py +4 -4
- infrahub/graphql/mutations/webhook.py +137 -0
- infrahub/graphql/parser.py +4 -4
- infrahub/graphql/queries/diff/tree.py +4 -4
- infrahub/graphql/queries/ipam.py +2 -2
- infrahub/graphql/queries/relationship.py +2 -2
- infrahub/graphql/queries/search.py +2 -2
- infrahub/graphql/resolvers/many_relationship.py +264 -0
- infrahub/graphql/resolvers/resolver.py +13 -110
- infrahub/graphql/subscription/graphql_query.py +2 -0
- infrahub/graphql/types/event.py +20 -11
- infrahub/graphql/types/node.py +2 -2
- infrahub/graphql/utils.py +2 -2
- infrahub/groups/ancestors.py +29 -0
- infrahub/groups/parsers.py +107 -0
- infrahub/menu/generator.py +7 -7
- infrahub/menu/menu.py +0 -10
- infrahub/menu/models.py +117 -16
- infrahub/menu/repository.py +111 -0
- infrahub/menu/utils.py +5 -8
- infrahub/message_bus/messages/__init__.py +1 -11
- infrahub/message_bus/messages/check_generator_run.py +2 -0
- infrahub/message_bus/messages/finalize_validator_execution.py +3 -0
- infrahub/message_bus/messages/request_generatordefinition_check.py +2 -0
- infrahub/message_bus/operations/__init__.py +0 -2
- infrahub/message_bus/operations/check/generator.py +1 -0
- infrahub/message_bus/operations/event/__init__.py +2 -2
- infrahub/message_bus/operations/finalize/validator.py +51 -1
- infrahub/message_bus/operations/requests/generator_definition.py +19 -19
- infrahub/message_bus/operations/requests/proposed_change.py +3 -1
- infrahub/pools/number.py +2 -4
- infrahub/proposed_change/tasks.py +37 -28
- infrahub/pytest_plugin.py +13 -10
- infrahub/server.py +1 -2
- infrahub/services/adapters/event/__init__.py +1 -1
- infrahub/task_manager/event.py +23 -9
- infrahub/tasks/artifact.py +2 -4
- infrahub/telemetry/__init__.py +0 -0
- infrahub/telemetry/constants.py +9 -0
- infrahub/telemetry/database.py +86 -0
- infrahub/telemetry/models.py +65 -0
- infrahub/telemetry/task_manager.py +77 -0
- infrahub/{tasks/telemetry.py → telemetry/tasks.py} +49 -56
- infrahub/telemetry/utils.py +11 -0
- infrahub/trace.py +4 -4
- infrahub/transformations/tasks.py +2 -2
- infrahub/trigger/catalogue.py +2 -5
- infrahub/trigger/constants.py +0 -8
- infrahub/trigger/models.py +14 -1
- infrahub/trigger/setup.py +90 -0
- infrahub/trigger/tasks.py +35 -90
- infrahub/utils.py +11 -1
- infrahub/validators/__init__.py +0 -0
- infrahub/validators/events.py +42 -0
- infrahub/validators/tasks.py +41 -0
- infrahub/webhook/gather.py +17 -0
- infrahub/webhook/models.py +22 -5
- infrahub/webhook/tasks.py +44 -19
- infrahub/webhook/triggers.py +22 -5
- infrahub/workers/infrahub_async.py +2 -2
- infrahub/workers/utils.py +2 -2
- infrahub/workflows/catalogue.py +28 -20
- infrahub/workflows/initialization.py +1 -3
- infrahub/workflows/models.py +1 -1
- infrahub/workflows/utils.py +10 -1
- infrahub_sdk/client.py +27 -8
- infrahub_sdk/config.py +3 -0
- infrahub_sdk/context.py +13 -0
- infrahub_sdk/exceptions.py +6 -0
- infrahub_sdk/generator.py +4 -1
- infrahub_sdk/graphql.py +45 -13
- infrahub_sdk/node.py +69 -20
- infrahub_sdk/protocols_base.py +32 -11
- infrahub_sdk/query_groups.py +6 -35
- infrahub_sdk/schema/__init__.py +55 -26
- infrahub_sdk/schema/main.py +8 -0
- infrahub_sdk/task/__init__.py +10 -0
- infrahub_sdk/task/manager.py +12 -6
- infrahub_sdk/testing/schemas/animal.py +9 -0
- infrahub_sdk/timestamp.py +12 -4
- {infrahub_server-1.2.0b1.dist-info → infrahub_server-1.2.1.dist-info}/METADATA +3 -2
- {infrahub_server-1.2.0b1.dist-info → infrahub_server-1.2.1.dist-info}/RECORD +289 -260
- {infrahub_server-1.2.0b1.dist-info → infrahub_server-1.2.1.dist-info}/entry_points.txt +1 -0
- infrahub_testcontainers/constants.py +2 -0
- infrahub_testcontainers/container.py +157 -12
- infrahub_testcontainers/docker-compose.test.yml +31 -6
- infrahub_testcontainers/helpers.py +18 -73
- infrahub_testcontainers/host.py +41 -0
- infrahub_testcontainers/measurements.py +93 -0
- infrahub_testcontainers/models.py +38 -0
- infrahub_testcontainers/performance_test.py +166 -0
- infrahub_testcontainers/plugin.py +136 -0
- infrahub_testcontainers/prometheus.yml +30 -0
- infrahub/message_bus/messages/event_branch_create.py +0 -11
- infrahub/message_bus/messages/event_branch_delete.py +0 -11
- infrahub/message_bus/messages/event_branch_rebased.py +0 -9
- infrahub/message_bus/messages/event_node_mutated.py +0 -15
- infrahub/message_bus/messages/event_schema_update.py +0 -9
- infrahub/message_bus/operations/event/node.py +0 -20
- infrahub/message_bus/operations/event/schema.py +0 -17
- infrahub/webhook/constants.py +0 -1
- {infrahub_server-1.2.0b1.dist-info → infrahub_server-1.2.1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.0b1.dist-info → infrahub_server-1.2.1.dist-info}/WHEEL +0 -0
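Most of the churn in the core modules listed above is a typing cleanup: annotations written with typing.Optional/typing.Union are rewritten in PEP 604 union syntax, with no change to the signatures' behavior. A hypothetical before/after (not Infrahub code) showing the pattern that repeats through the hunks below:

from typing import Optional


def load_before(at: Optional[str] = None, fields: Optional[list[str]] = None) -> Optional[list[str]]:
    # Pre-1.2.1 style: Optional[...] spelling.
    return fields


def load_after(at: str | None = None, fields: list[str] | None = None) -> list[str] | None:
    # 1.2.1 style: PEP 604 unions; behavior is identical.
    return fields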
infrahub/core/node/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from enum import Enum
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Sequence, TypeVar, overload

 from infrahub_sdk.utils import is_valid_uuid
 from infrahub_sdk.uuidt import UUIDT
@@ -63,7 +63,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         _meta.default_filter = default_filter
         super().__init_subclass_with_meta__(_meta=_meta, **options)

-    def get_schema(self) ->
+    def get_schema(self) -> NodeSchema | ProfileSchema | TemplateSchema:
         return self._schema

     def get_kind(self) -> str:
@@ -80,17 +80,18 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
     def get_updated_at(self) -> Timestamp | None:
         return self._updated_at

-    async def get_hfid(self, db: InfrahubDatabase, include_kind: bool = False) ->
+    async def get_hfid(self, db: InfrahubDatabase, include_kind: bool = False) -> list[str] | None:
         """Return the Human friendly id of the node."""
         if not self._schema.human_friendly_id:
             return None

-
+        hfid_values = [await self.get_path_value(db=db, path=item) for item in self._schema.human_friendly_id]
+        hfid = [value for value in hfid_values if value is not None]
         if include_kind:
             return [self.get_kind()] + hfid
         return hfid

-    async def get_hfid_as_string(self, db: InfrahubDatabase, include_kind: bool = False) ->
+    async def get_hfid_as_string(self, db: InfrahubDatabase, include_kind: bool = False) -> str | None:
         """Return the Human friendly id of the node in string format separated with a dunder (__) ."""
         hfid = await self.get_hfid(db=db, include_kind=include_kind)
         if not hfid:
@@ -160,18 +161,18 @@ class Node(BaseNode, metaclass=BaseNodeMeta):

         return f"{self.get_kind()}(ID: {str(self.id)})"

-    def __init__(self, schema:
-        self._schema:
+    def __init__(self, schema: NodeSchema | ProfileSchema | TemplateSchema, branch: Branch, at: Timestamp):
+        self._schema: NodeSchema | ProfileSchema | TemplateSchema = schema
         self._branch: Branch = branch
         self._at: Timestamp = at
         self._existing: bool = False

-        self._updated_at:
+        self._updated_at: Timestamp | None = None
         self.id: str = None
         self.db_id: str = None

-        self._source:
-        self._owner:
+        self._source: Node | None = None
+        self._owner: Node | None = None
         self._is_protected: bool = None
         self._computed_jinja2_attributes: list[str] = []

@@ -191,10 +192,10 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
     @classmethod
     async def init(
         cls,
-        schema:
+        schema: NodeSchema | ProfileSchema | TemplateSchema | str,
         db: InfrahubDatabase,
-        branch:
-        at:
+        branch: Branch | str | None = ...,
+        at: Timestamp | str | None = ...,
     ) -> Self: ...

     @overload
@@ -203,17 +204,17 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         cls,
         schema: type[SchemaProtocol],
         db: InfrahubDatabase,
-        branch:
-        at:
+        branch: Branch | str | None = ...,
+        at: Timestamp | str | None = ...,
     ) -> SchemaProtocol: ...

     @classmethod
     async def init(
         cls,
-        schema:
+        schema: NodeSchema | ProfileSchema | TemplateSchema | str | type[SchemaProtocol],
         db: InfrahubDatabase,
-        branch:
-        at:
+        branch: Branch | str | None = None,
+        at: Timestamp | str | None = None,
     ) -> Self | SchemaProtocol:
         attrs: dict[str, Any] = {}

@@ -546,7 +547,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         )
         return attr

-    async def process_label(self, db:
+    async def process_label(self, db: InfrahubDatabase | None = None) -> None:  # noqa: ARG002
         # If there label and name are both defined for this node
         # if label is not define, we'll automatically populate it with a human friendy vesion of name
         if not self._existing and hasattr(self, "label") and hasattr(self, "name"):
@@ -554,7 +555,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
             self.label.value = " ".join([word.title() for word in self.name.value.split("_")])
             self.label.is_default = False

-    async def new(self, db: InfrahubDatabase, id:
+    async def new(self, db: InfrahubDatabase, id: str | None = None, **kwargs: Any) -> Self:
         if id and not is_valid_uuid(id):
             raise ValidationError({"id": f"{id} is not a valid UUID"})
         if id:
@@ -577,9 +578,9 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
     async def load(
         self,
         db: InfrahubDatabase,
-        id:
-        db_id:
-        updated_at:
+        id: str | None = None,
+        db_id: str | None = None,
+        updated_at: Timestamp | str | None = None,
         **kwargs: Any,
     ) -> Self:
         self.id = id
@@ -630,7 +631,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         return node_changelog

     async def _update(
-        self, db: InfrahubDatabase, at:
+        self, db: InfrahubDatabase, at: Timestamp | None = None, fields: list[str] | None = None
     ) -> NodeChangelog:
         """Update the node in the database if needed."""

@@ -665,7 +666,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         node_changelog.display_label = await self.render_display_label(db=db)
         return node_changelog

-    async def save(self, db: InfrahubDatabase, at:
+    async def save(self, db: InfrahubDatabase, at: Timestamp | None = None, fields: list[str] | None = None) -> Self:
         """Create or Update the Node in the database."""

         save_at = Timestamp(at)
@@ -677,7 +678,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
             self._node_changelog = await self._create(at=save_at, db=db)
         return self

-    async def delete(self, db: InfrahubDatabase, at:
+    async def delete(self, db: InfrahubDatabase, at: Timestamp | None = None) -> None:
         """Delete the Node in the database."""

         delete_at = Timestamp(at)
@@ -770,7 +771,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
                 response[field_name] = None
                 continue

-            field:
+            field: BaseAttribute | None = getattr(self, field_name, None)

             if not field:
                 response[field_name] = None
@@ -832,7 +833,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):

         return changed

-    async def render_display_label(self, db:
+    async def render_display_label(self, db: InfrahubDatabase | None = None) -> str:  # noqa: ARG002
         if not self._schema.display_labels:
             return repr(self)

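Beyond the annotation changes, Node.get_hfid in the hunk above now builds the human-friendly ID in two steps and drops components whose path value is missing. A standalone sketch of that filtering behavior, using a hypothetical helper and kind name rather than the Node method itself:

def build_hfid(values: list[str | None], kind: str, include_kind: bool = False) -> list[str] | None:
    """Mimic the new get_hfid behavior: missing components are dropped, not returned as None."""
    if not values:
        return None
    hfid = [value for value in values if value is not None]
    if include_kind:
        return [kind] + hfid
    return hfid


print(build_hfid(["leaf-01", None, "Ethernet1"], kind="InfraInterface", include_kind=True))
# ['InfraInterface', 'leaf-01', 'Ethernet1']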
infrahub/core/node/base.py
CHANGED
@@ -1,13 +1,11 @@
 from __future__ import annotations

-from typing import Optional
-
 from ..utils import SubclassWithMeta, SubclassWithMeta_Meta


 class BaseOptions:
-    name:
-    description:
+    name: str | None = None
+    description: str | None = None

     _frozen: bool = False

infrahub/core/node/constraints/attribute_uniqueness.py
CHANGED

@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING

 from infrahub.core import registry
 from infrahub.core.branch import Branch
@@ -18,7 +18,7 @@ class NodeAttributeUniquenessConstraint(NodeConstraintInterface):
         self.db = db
         self.branch = branch

-    async def check(self, node: Node, at:
+    async def check(self, node: Node, at: Timestamp | None = None, filters: list[str] | None = None) -> None:
         at = Timestamp(at)
         node_schema = node.get_schema()
         for unique_attr in node_schema.unique_attributes:

infrahub/core/node/constraints/grouped_uniqueness.py
CHANGED

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING, Iterable
+from typing import TYPE_CHECKING, Iterable

 from infrahub.core import registry
 from infrahub.core.constants import NULL_VALUE
@@ -9,6 +9,11 @@ from infrahub.core.schema import (
     SchemaAttributePath,
     SchemaAttributePathValue,
 )
+from infrahub.core.schema.basenode_schema import (
+    SchemaUniquenessConstraintPath,
+    UniquenessConstraintType,
+    UniquenessConstraintViolation,
+)
 from infrahub.core.validators.uniqueness.index import UniquenessQueryResultsIndex
 from infrahub.core.validators.uniqueness.model import (
     NodeUniquenessQueryRequest,
@@ -16,7 +21,7 @@ from infrahub.core.validators.uniqueness.model import (
     QueryRelationshipAttributePath,
 )
 from infrahub.core.validators.uniqueness.query import NodeUniqueAttributeConstraintQuery
-from infrahub.exceptions import ValidationError
+from infrahub.exceptions import HFIDViolatedError, ValidationError

 from .interface import NodeConstraintInterface

@@ -39,15 +44,15 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
         self,
         updated_node: Node,
         node_schema: MainSchemaTypes,
-
-        filters:
+        uniqueness_constraint_paths: list[SchemaUniquenessConstraintPath],
+        filters: list[str] | None = None,
     ) -> NodeUniquenessQueryRequest:
         query_request = NodeUniquenessQueryRequest(kind=node_schema.kind)
-        for
+        for uniqueness_constraint_path in uniqueness_constraint_paths:
             include_in_query = not filters
             query_relationship_paths: set[QueryRelationshipAttributePath] = set()
             query_attribute_paths: set[QueryAttributePath] = set()
-            for attribute_path in
+            for attribute_path in uniqueness_constraint_path.attributes_paths:
                 if attribute_path.related_schema and attribute_path.relationship_schema:
                     if filters and attribute_path.relationship_schema.name in filters:
                         include_in_query = True
@@ -118,71 +123,118 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
         )
         return node_value_combination

-    def
-        self, schema_attribute_path_values: list[SchemaAttributePathValue], results_index: UniquenessQueryResultsIndex
-    ) -> None:
-        # constraint cannot be violated if this node is missing any values
-        if any(sapv.value is None for sapv in schema_attribute_path_values):
-            return
-
-        matching_node_ids = results_index.get_node_ids_for_value_group(schema_attribute_path_values)
-        if not matching_node_ids:
-            return
-        uniqueness_constraint_fields = []
-        for sapv in schema_attribute_path_values:
-            if sapv.relationship_schema:
-                uniqueness_constraint_fields.append(sapv.relationship_schema.name)
-            elif sapv.attribute_schema:
-                uniqueness_constraint_fields.append(sapv.attribute_schema.name)
-        uniqueness_constraint_string = "-".join(uniqueness_constraint_fields)
-        error_msg = f"Violates uniqueness constraint '{uniqueness_constraint_string}'"
-        errors = [ValidationError({field_name: error_msg}) for field_name in uniqueness_constraint_fields]
-        raise ValidationError(errors)
-
-    async def _check_results(
+    async def _get_violations(
         self,
         updated_node: Node,
-
+        uniqueness_constraint_paths: list[SchemaUniquenessConstraintPath],
         query_results: Iterable[QueryResult],
-    ) ->
+    ) -> list[UniquenessConstraintViolation]:
         results_index = UniquenessQueryResultsIndex(
             query_results=query_results, exclude_node_ids={updated_node.get_id()}
         )
-
+        violations = []
+        for uniqueness_constraint_path in uniqueness_constraint_paths:
+            # path_group = one constraint (that can contain multiple items)
             schema_attribute_path_values = await self._get_node_attribute_path_values(
-                updated_node=updated_node, path_group=
+                updated_node=updated_node, path_group=uniqueness_constraint_path.attributes_paths
             )
-
-
+
+            # constraint cannot be violated if this node is missing any values
+            if any(sapv.value is None for sapv in schema_attribute_path_values):
+                continue
+
+            matching_node_ids = results_index.get_node_ids_for_value_group(schema_attribute_path_values)
+            if not matching_node_ids:
+                continue
+
+            uniqueness_constraint_fields = []
+            for sapv in schema_attribute_path_values:
+                if sapv.relationship_schema:
+                    uniqueness_constraint_fields.append(sapv.relationship_schema.name)
+                elif sapv.attribute_schema:
+                    uniqueness_constraint_fields.append(sapv.attribute_schema.name)
+
+            violations.append(
+                UniquenessConstraintViolation(
+                    nodes_ids=matching_node_ids,
+                    fields=uniqueness_constraint_fields,
+                    typ=uniqueness_constraint_path.typ,
+                )
             )

-
+        return violations
+
+    async def _get_single_schema_violations(
         self,
         node: Node,
         node_schema: MainSchemaTypes,
-        at:
-        filters:
-    ) ->
+        at: Timestamp | None = None,
+        filters: list[str] | None = None,
+    ) -> list[UniquenessConstraintViolation]:
         schema_branch = self.db.schema.get_schema_branch(name=self.branch.name)
-
+
+        uniqueness_constraint_paths = node_schema.get_unique_constraint_schema_attribute_paths(
+            schema_branch=schema_branch
+        )
         query_request = await self._build_query_request(
-            updated_node=node,
+            updated_node=node,
+            node_schema=node_schema,
+            uniqueness_constraint_paths=uniqueness_constraint_paths,
+            filters=filters,
         )
         if not query_request:
-            return
+            return []
+
         query = await NodeUniqueAttributeConstraintQuery.init(
             db=self.db, branch=self.branch, at=at, query_request=query_request, min_count_required=0
         )
         await query.execute(db=self.db)
-        await self.
+        return await self._get_violations(
+            updated_node=node,
+            uniqueness_constraint_paths=uniqueness_constraint_paths,
+            query_results=query.get_results(),
+        )
+
+    async def check(self, node: Node, at: Timestamp | None = None, filters: list[str] | None = None) -> None:
+        def _frozen_constraints(schema: MainSchemaTypes) -> frozenset[frozenset[str]]:
+            if not schema.uniqueness_constraints:
+                return frozenset()
+            return frozenset(frozenset(uc) for uc in schema.uniqueness_constraints)

-    async def check(self, node: Node, at: Optional[Timestamp] = None, filters: Optional[list[str]] = None) -> None:
         node_schema = node.get_schema()
-
+        include_node_schema = True
+        frozen_node_constraints = _frozen_constraints(node_schema)
+        schemas_to_check: list[MainSchemaTypes] = []
         if node_schema.inherit_from:
             for parent_schema_name in node_schema.inherit_from:
                 parent_schema = self.schema_branch.get(name=parent_schema_name, duplicate=False)
-                if parent_schema.uniqueness_constraints:
-
+                if not parent_schema.uniqueness_constraints:
+                    continue
+                schemas_to_check.append(parent_schema)
+                frozen_parent_constraints = _frozen_constraints(parent_schema)
+                if frozen_node_constraints <= frozen_parent_constraints:
+                    include_node_schema = False
+
+        if include_node_schema:
+            schemas_to_check.append(node_schema)
+
+        violations = []
         for schema in schemas_to_check:
-            await self.
+            schema_violations = await self._get_single_schema_violations(
+                node=node, node_schema=schema, at=at, filters=filters
+            )
+            violations.extend(schema_violations)
+
+        is_hfid_violated = any(violation.typ == UniquenessConstraintType.HFID for violation in violations)
+
+        for violation in violations:
+            if violation.typ == UniquenessConstraintType.STANDARD or (
+                violation.typ == UniquenessConstraintType.SUBSET_OF_HFID and not is_hfid_violated
+            ):
+                error_msg = f"Violates uniqueness constraint '{'-'.join(violation.fields)}'"
+                raise ValidationError(error_msg)
+
+        for violation in violations:
+            if violation.typ == UniquenessConstraintType.HFID:
+                error_msg = f"Violates uniqueness constraint '{'-'.join(violation.fields)}'"
+                raise HFIDViolatedError(error_msg, matching_nodes_ids=violation.nodes_ids)

infrahub/core/node/constraints/interface.py
CHANGED

@@ -1,5 +1,4 @@
 from abc import ABC, abstractmethod
-from typing import Optional

 from infrahub.core.node import Node
 from infrahub.core.timestamp import Timestamp
@@ -7,4 +6,4 @@ from infrahub.core.timestamp import Timestamp

 class NodeConstraintInterface(ABC):
     @abstractmethod
-    async def check(self, node: Node, at:
+    async def check(self, node: Node, at: Timestamp | None = None, filters: list[str] | None = None) -> None: ...

infrahub/core/node/delete_validator.py
CHANGED

@@ -1,6 +1,6 @@
 from collections import defaultdict
 from enum import Enum
-from typing import Iterable
+from typing import Iterable

 from infrahub.core import registry
 from infrahub.core.branch import Branch
@@ -124,16 +124,14 @@ class NodeDeleteValidator:
         self._all_schemas_map = schema_branch.get_all(duplicate=False)
         self.index: NodeDeleteIndex = NodeDeleteIndex(all_schemas_map=self._all_schemas_map)

-    async def get_ids_to_delete(self, nodes: Iterable[Node], at:
+    async def get_ids_to_delete(self, nodes: Iterable[Node], at: Timestamp | str | None = None) -> set[str]:
         start_schemas = {node.get_schema() for node in nodes}
         self.index.index(start_schemas=start_schemas)
         at = Timestamp(at)

         return await self._analyze_delete_dependencies(start_nodes=nodes, at=at)

-    async def _analyze_delete_dependencies(
-        self, start_nodes: Iterable[Node], at: Optional[Union[Timestamp, str]]
-    ) -> set[str]:
+    async def _analyze_delete_dependencies(self, start_nodes: Iterable[Node], at: Timestamp | str | None) -> set[str]:
         full_relationship_identifiers = self.index.get_relationship_identifiers()
         if not full_relationship_identifiers:
             return {node.get_id() for node in start_nodes}
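The largest functional change in this group is in NodeGroupedUniquenessConstraint.check: instead of raising on the first match, violations are now collected per schema, tagged with a UniquenessConstraintType, and only then converted into exceptions, with HFID violations raising the new HFIDViolatedError. The sketch below reproduces just that classification step with stand-in types (local dataclass/enum/exception definitions, not the actual Infrahub imports):

from dataclasses import dataclass
from enum import Enum


class UniquenessConstraintType(Enum):  # stand-in for the type added in basenode_schema
    STANDARD = "standard"
    SUBSET_OF_HFID = "subset_of_hfid"
    HFID = "hfid"


@dataclass
class UniquenessConstraintViolation:  # stand-in, mirrors the fields used in the diff
    nodes_ids: set[str]
    fields: list[str]
    typ: UniquenessConstraintType


class ValidationError(Exception): ...


class HFIDViolatedError(Exception):  # stand-in for infrahub.exceptions.HFIDViolatedError
    def __init__(self, message: str, matching_nodes_ids: set[str]) -> None:
        super().__init__(message)
        self.matching_nodes_ids = matching_nodes_ids


def raise_for_violations(violations: list[UniquenessConstraintViolation]) -> None:
    is_hfid_violated = any(v.typ is UniquenessConstraintType.HFID for v in violations)
    for violation in violations:
        # Standard constraints always fail; constraints that are a subset of the
        # HFID only fail when the full HFID itself is not violated.
        if violation.typ is UniquenessConstraintType.STANDARD or (
            violation.typ is UniquenessConstraintType.SUBSET_OF_HFID and not is_hfid_violated
        ):
            raise ValidationError(f"Violates uniqueness constraint '{'-'.join(violation.fields)}'")
    for violation in violations:
        # HFID violations are reported last, with the IDs of the conflicting nodes attached.
        if violation.typ is UniquenessConstraintType.HFID:
            raise HFIDViolatedError(
                f"Violates uniqueness constraint '{'-'.join(violation.fields)}'",
                matching_nodes_ids=violation.nodes_ids,
            )


try:
    raise_for_violations(
        [UniquenessConstraintViolation(nodes_ids={"a1b2"}, fields=["name", "site"], typ=UniquenessConstraintType.HFID)]
    )
except HFIDViolatedError as exc:
    print(exc, exc.matching_nodes_ids)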
infrahub/core/node/ipam.py
CHANGED
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING

 from infrahub.core.ipam.size import get_prefix_space
 from infrahub.core.ipam.utilization import PrefixUtilizationGetter
@@ -16,10 +16,10 @@ class BuiltinIPPrefix(Node):
     async def to_graphql(
         self,
         db: InfrahubDatabase,
-        fields:
-        related_node_ids:
+        fields: dict | None = None,
+        related_node_ids: set | None = None,
         filter_sensitive: bool = False,
-        permissions:
+        permissions: dict | None = None,  # noqa: ARG002
         include_properties: bool = True,
     ) -> dict:
         response = await super().to_graphql(

infrahub/core/node/permissions.py
CHANGED

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING

 from infrahub.permissions.constants import PermissionDecisionFlag

@@ -14,10 +14,10 @@ class CoreGlobalPermission(Node):
     async def to_graphql(
         self,
         db: InfrahubDatabase,
-        fields:
-        related_node_ids:
+        fields: dict | None = None,
+        related_node_ids: set | None = None,
         filter_sensitive: bool = False,
-        permissions:
+        permissions: dict | None = None,
         include_properties: bool = True,
     ) -> dict:
         response = await super().to_graphql(
@@ -41,10 +41,10 @@ class CoreObjectPermission(Node):
     async def to_graphql(
         self,
         db: InfrahubDatabase,
-        fields:
-        related_node_ids:
+        fields: dict | None = None,
+        related_node_ids: set | None = None,
         filter_sensitive: bool = False,
-        permissions:
+        permissions: dict | None = None,
         include_properties: bool = True,
     ) -> dict:
         response = await super().to_graphql(

infrahub/core/node/resource_manager/ip_address_pool.py
CHANGED

@@ -1,7 +1,7 @@
 from __future__ import annotations

 import ipaddress
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from infrahub.core import registry
 from infrahub.core.ipam.reconciler import IpamReconciler
@@ -26,10 +26,10 @@ class CoreIPAddressPool(Node):
         self,
         db: InfrahubDatabase,
         branch: Branch,
-        identifier:
-        data:
-        address_type:
-        prefixlen:
+        identifier: str | None = None,
+        data: dict[str, Any] | None = None,
+        address_type: str | None = None,
+        prefixlen: int | None = None,
     ) -> Node:
         # Check if there is already a resource allocated with this identifier
         # if not, pull all existing prefixes and allocated the next available
@@ -80,7 +80,7 @@ class CoreIPAddressPool(Node):

         return node

-    async def get_next(self, db: InfrahubDatabase, prefixlen:
+    async def get_next(self, db: InfrahubDatabase, prefixlen: int | None = None) -> IPAddressType:
         # Measure utilization of all prefixes identified as resources
         resources = await self.resources.get_peers(db=db)  # type: ignore[attr-defined]
         ip_namespace = await self.ip_namespace.get_peer(db=db)  # type: ignore[attr-defined]

infrahub/core/node/resource_manager/ip_prefix_pool.py
CHANGED

@@ -1,7 +1,7 @@
 from __future__ import annotations

 import ipaddress
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from netaddr import IPSet

@@ -28,11 +28,11 @@ class CoreIPPrefixPool(Node):
         self,
         db: InfrahubDatabase,
         branch: Branch,
-        identifier:
-        data:
-        prefixlen:
-        member_type:
-        prefix_type:
+        identifier: str | None = None,
+        data: dict[str, Any] | None = None,
+        prefixlen: int | None = None,
+        member_type: str | None = None,
+        prefix_type: str | None = None,
     ) -> Node:
         # Check if there is already a resource allocated with this identifier
         # if not, pull all existing prefixes and allocated the next available

infrahub/core/node/resource_manager/number_pool.py
CHANGED

@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING

 from infrahub.core.query.resource_manager import NumberPoolGetReserved, NumberPoolGetUsed, NumberPoolSetReserved
 from infrahub.exceptions import PoolExhaustedError
@@ -18,7 +18,7 @@ class CoreNumberPool(Node):
         db: InfrahubDatabase,
         branch: Branch,
         node: Node,
-        identifier:
+        identifier: str | None = None,
     ) -> int:
         identifier = identifier or node.get_id()
         # Check if there is already a resource allocated with this identifier
@@ -55,7 +55,7 @@ class CoreNumberPool(Node):
         return next_number


-def find_next_free(start: int, end: int, taken: list[int | None]) ->
+def find_next_free(start: int, end: int, taken: list[int | None]) -> int | None:
    used_numbers = [number for number in taken if number is not None]
    used_set = set(used_numbers)

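number_pool.py also re-types the module-level find_next_free helper to return int | None. Only its first two statements appear in the hunk above; the sketch below completes it under the assumption that the pool range is inclusive and the first unreserved number wins, which is an illustration rather than the package's exact body:

def find_next_free(start: int, end: int, taken: list[int | None]) -> int | None:
    used_numbers = [number for number in taken if number is not None]
    used_set = set(used_numbers)
    # Assumed continuation: return the first number in [start, end] that is not
    # reserved, or None when the pool is exhausted.
    for candidate in range(start, end + 1):
        if candidate not in used_set:
            return candidate
    return None


print(find_next_free(100, 110, [100, None, 101, 103]))  # 102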
infrahub/core/path.py
CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

 from collections import defaultdict
 from itertools import chain
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any

 from pydantic import BaseModel, Field
 from typing_extensions import Self
@@ -56,12 +56,12 @@ class DataPath(InfrahubPath):
     path_type: PathType
     node_id: str = Field(..., description="Kind of the model in the schema")
     kind: str = Field(..., description="Kind of the main node")
-    field_name:
+    field_name: str | None = Field(
         default=None, description="Name of the field (either an attribute or a relationship)"
     )
-    property_name:
-    peer_id:
-    value:
+    property_name: str | None = Field(default=None, description="Name of the property")
+    peer_id: str | None = Field(default=None, description="")
+    value: Any | None = Field(default=None, description="Optional value of the resource")

     @property
     def resource_type(self) -> PathResourceType:
@@ -109,11 +109,11 @@ class GroupedDataPaths:
 class SchemaPath(InfrahubPath):
     path_type: SchemaPathType
     schema_kind: str = Field(..., description="Kind of the model in the schema")
-    schema_id:
-    field_name:
+    schema_id: str | None = Field(default=None, description="UUID of the model in the schema")
+    field_name: str | None = Field(
         default=None, description="Name of the field (either an attribute or a relationship)"
     )
-    property_name:
+    property_name: str | None = Field(default=None, description="Name of the property")

     @property
     def resource_type(self) -> PathResourceType:
@@ -133,10 +133,10 @@ class SchemaPath(InfrahubPath):
     @classmethod
     def init(
         cls,
-        schema:
-        schema_id:
-        field_name:
-        property_name:
+        schema: NodeSchema | GenericSchema,
+        schema_id: str | None = None,
+        field_name: str | None = None,
+        property_name: str | None = None,
     ) -> Self:
         if field_name and not schema.get_field(name=field_name, raise_on_error=False):
             raise ValueError(f"Field : {field_name} is not valid for {schema.kind}")