infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/oauth2.py +13 -19
- infrahub/api/oidc.py +15 -21
- infrahub/api/schema.py +24 -3
- infrahub/artifacts/models.py +2 -1
- infrahub/auth.py +137 -3
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +103 -98
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +30 -3
- infrahub/computed_attribute/tasks.py +20 -8
- infrahub/core/attribute.py +13 -5
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +7 -3
- infrahub/core/branch/tasks.py +70 -8
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +3 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +5 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +12 -11
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
- infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
- infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +35 -4
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +52 -19
- infrahub/core/node/__init__.py +158 -51
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +46 -63
- infrahub/core/node/lock_utils.py +70 -44
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/attribute.py +55 -0
- infrahub/core/query/ipam.py +1 -0
- infrahub/core/query/node.py +23 -4
- infrahub/core/query/relationship.py +1 -0
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -0
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +16 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +22 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +300 -8
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +34 -22
- infrahub/git/base.py +4 -1
- infrahub/git/integrator.py +23 -15
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +22 -5
- infrahub/git/tasks.py +66 -10
- infrahub/git/utils.py +123 -1
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/endpoints.py +14 -4
- infrahub/graphql/manager.py +4 -9
- infrahub/graphql/mutations/convert_object_type.py +11 -1
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +54 -35
- infrahub/graphql/mutations/main.py +27 -28
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -16
- infrahub/message_bus/types.py +2 -1
- infrahub/middleware.py +26 -1
- infrahub/permissions/constants.py +2 -0
- infrahub/proposed_change/tasks.py +35 -17
- infrahub/server.py +21 -4
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/workers/dependencies.py +10 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +80 -0
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +13 -10
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +28 -9
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +53 -44
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
- infrahub_testcontainers/container.py +115 -3
- infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
- infrahub_testcontainers/docker-compose.test.yml +6 -1
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any, Sequence
+from typing import TYPE_CHECKING, Any, Sequence, TypeAlias
 
 from pydantic import BaseModel, ConfigDict, Field
 from typing_extensions import Self
@@ -8,17 +8,10 @@ from typing_extensions import Self
 from infrahub.core import registry
 from infrahub.core.path import SchemaPath  # noqa: TC001
 from infrahub.core.query import Query  # noqa: TC001
-from infrahub.core.schema import (
-    AttributeSchema,
-    GenericSchema,
-    NodeSchema,
-    RelationshipSchema,
-    SchemaRoot,
-    internal_schema,
-)
+from infrahub.core.schema import AttributeSchema, MainSchemaTypes, RelationshipSchema, SchemaRoot, internal_schema
 from infrahub.core.timestamp import Timestamp
 
-from .query import
+from .query import MigrationBaseQuery  # noqa: TC001
 
 if TYPE_CHECKING:
     from infrahub.core.branch import Branch
@@ -41,10 +34,12 @@ class MigrationResult(BaseModel):
 class SchemaMigration(BaseModel):
     model_config = ConfigDict(arbitrary_types_allowed=True)
     name: str = Field(..., description="Name of the migration")
-    queries: Sequence[type[
+    queries: Sequence[type[MigrationBaseQuery]] = Field(
+        ..., description="List of queries to execute for this migration"
+    )
 
-    new_node_schema:
-    previous_node_schema:
+    new_node_schema: MainSchemaTypes | None = None
+    previous_node_schema: MainSchemaTypes | None = None
     schema_path: SchemaPath
 
     async def execute_pre_queries(
@@ -66,9 +61,14 @@ class SchemaMigration(BaseModel):
         return result
 
     async def execute_queries(
-        self,
+        self,
+        db: InfrahubDatabase,
+        result: MigrationResult,
+        branch: Branch,
+        at: Timestamp,
+        queries: Sequence[type[MigrationBaseQuery]],
     ) -> MigrationResult:
-        for migration_query in
+        for migration_query in queries:
             try:
                 query = await migration_query.init(db=db, branch=branch, at=at, migration=self)
                 await query.execute(db=db)
@@ -79,31 +79,40 @@ class SchemaMigration(BaseModel):
 
         return result
 
-    async def execute(
+    async def execute(
+        self,
+        db: InfrahubDatabase,
+        branch: Branch,
+        at: Timestamp | str | None = None,
+        queries: Sequence[type[MigrationBaseQuery]] | None = None,
+    ) -> MigrationResult:
         async with db.start_transaction() as ts:
             result = MigrationResult()
             at = Timestamp(at)
 
             await self.execute_pre_queries(db=ts, result=result, branch=branch, at=at)
-
+            queries_to_execute = queries or self.queries
+            await self.execute_queries(db=ts, result=result, branch=branch, at=at, queries=queries_to_execute)
             await self.execute_post_queries(db=ts, result=result, branch=branch, at=at)
 
         return result
 
     @property
-    def new_schema(self) ->
+    def new_schema(self) -> MainSchemaTypes:
         if self.new_node_schema:
             return self.new_node_schema
         raise ValueError("new_node_schema hasn't been initialized")
 
     @property
-    def previous_schema(self) ->
+    def previous_schema(self) -> MainSchemaTypes:
         if self.previous_node_schema:
             return self.previous_node_schema
         raise ValueError("previous_node_schema hasn't been initialized")
 
 
 class AttributeSchemaMigration(SchemaMigration):
+    uuids: list[str] | None = None
+
     @property
     def new_attribute_schema(self) -> AttributeSchema:
         if not self.schema_path.field_name:
@@ -215,3 +224,27 @@ class ArbitraryMigration(BaseModel):
 
     async def execute(self, db: InfrahubDatabase) -> MigrationResult:
         raise NotImplementedError()
+
+
+class MigrationRequiringRebase(BaseModel):
+    model_config = ConfigDict(arbitrary_types_allowed=True)
+    name: str = Field(..., description="Name of the migration")
+    minimum_version: int = Field(..., description="Minimum version of the graph to execute this migration")
+
+    @classmethod
+    def init(cls, **kwargs: dict[str, Any]) -> Self:
+        return cls(**kwargs)  # type: ignore[arg-type]
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:
+        raise NotImplementedError()
+
+    async def execute_against_branch(self, db: InfrahubDatabase, branch: Branch) -> MigrationResult:
+        """Method that will be run against non-default branches, it assumes that the branches have been rebased."""
+        raise NotImplementedError()
+
+    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
+        """Method that will be run against the default branch."""
+        raise NotImplementedError()
+
+
+MigrationTypes: TypeAlias = GraphMigration | InternalSchemaMigration | ArbitraryMigration | MigrationRequiringRebase
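
The new MigrationRequiringRebase base class, and the MigrationTypes alias that now includes it, split migration work into a default-branch hook (execute) and a per-branch hook (execute_against_branch) that assumes branches have already been rebased. A minimal sketch of a subclass follows, assuming these classes live in infrahub.core.migrations.shared as the file list above suggests; the class name, migration name, version number, and import paths below are assumptions for illustration, not part of the diff:

# Sketch only: import paths and all values are assumed.
from infrahub.core.branch import Branch
from infrahub.core.migrations.shared import MigrationRequiringRebase, MigrationResult
from infrahub.database import InfrahubDatabase


class BackfillExampleMigration(MigrationRequiringRebase):
    name: str = "backfill-example"
    minimum_version: int = 43

    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:
        # Hypothetical: nothing to verify, report success.
        return MigrationResult()

    async def execute(self, db: InfrahubDatabase) -> MigrationResult:
        # Work that must run against the default branch goes here.
        return MigrationResult()

    async def execute_against_branch(self, db: InfrahubDatabase, branch: Branch) -> MigrationResult:
        # Work for each non-default branch; the base class assumes it was rebased first.
        return MigrationResult()
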
infrahub/core/node/__init__.py
CHANGED
@@ -47,6 +47,7 @@ from ..query.relationship import RelationshipDeleteAllQuery
 from ..relationship import RelationshipManager
 from ..utils import update_relationships_to
 from .base import BaseNode, BaseNodeMeta, BaseNodeOptions
+from .node_property_attribute import DisplayLabel, HumanFriendlyIdentifier
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -79,6 +80,29 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
         _meta.default_filter = default_filter
         super().__init_subclass_with_meta__(_meta=_meta, **options)
 
+    def __init__(self, schema: NodeSchema | ProfileSchema | TemplateSchema, branch: Branch, at: Timestamp):
+        self._schema: NodeSchema | ProfileSchema | TemplateSchema = schema
+        self._branch: Branch = branch
+        self._at: Timestamp = at
+        self._existing: bool = False
+
+        self._updated_at: Timestamp | None = None
+        self.id: str = None
+        self.db_id: str = None
+
+        self._source: Node | None = None
+        self._owner: Node | None = None
+        self._is_protected: bool = None
+        self._computed_jinja2_attributes: list[str] = []
+
+        self._display_label: DisplayLabel | None = None
+        self._human_friendly_id: HumanFriendlyIdentifier | None = None
+
+        # Lists of attributes and relationships names
+        self._attributes: list[str] = []
+        self._relationships: list[str] = []
+        self._node_changelog: NodeChangelog | None = None
+
     def get_schema(self) -> NonGenericSchemaTypes:
         return self._schema
 
@@ -126,11 +150,14 @@
         if not self._schema.human_friendly_id:
             return None
 
-        hfid_values
+        hfid_values: list[str] | None = None
+        if self._human_friendly_id:
+            hfid_values = self._human_friendly_id.get_value(node=self, at=self._at)
+        if not hfid_values:
+            hfid_values = [await self.get_path_value(db=db, path=item) for item in self._schema.human_friendly_id]
+
         hfid = [value for value in hfid_values if value is not None]
-        if include_kind
-            return [self.get_kind()] + hfid
-        return hfid
+        return [self.get_kind()] + hfid if include_kind else hfid
 
     async def get_hfid_as_string(self, db: InfrahubDatabase, include_kind: bool = False) -> str | None:
         """Return the Human friendly id of the node in string format separated with a dunder (__) ."""
@@ -139,6 +166,37 @@
             return None
         return "__".join(hfid)
 
+    def has_human_friendly_id(self) -> bool:
+        return self._human_friendly_id is not None
+
+    async def add_human_friendly_id(self, db: InfrahubDatabase) -> None:
+        if not self._schema.human_friendly_id or self._human_friendly_id:
+            return
+
+        self._human_friendly_id = HumanFriendlyIdentifier(
+            node_schema=self._schema, template=self._schema.human_friendly_id
+        )
+        await self._human_friendly_id.compute(db=db, node=self)
+
+    async def get_display_label(self, db: InfrahubDatabase) -> str:
+        if self._display_label:
+            if isinstance(self._display_label._value, str):
+                return self._display_label._value
+            if self._display_label._value:
+                return self._display_label._value.value
+
+        return await self.render_display_label(db=db)
+
+    def has_display_label(self) -> bool:
+        return self._display_label is not None
+
+    async def add_display_label(self, db: InfrahubDatabase) -> None:
+        if not self._schema.display_label or self._display_label:
+            return
+
+        self._display_label = DisplayLabel(node_schema=self._schema, template=self._schema.display_label)
+        await self._display_label.compute(db=db, node=self)
+
     async def get_path_value(self, db: InfrahubDatabase, path: str) -> str:
         schema_path = self._schema.parse_schema_path(
             path=path, schema=db.schema.get_schema_branch(name=self._branch.name)
@@ -197,30 +255,8 @@
         return self._branch
 
     def __repr__(self) -> str:
-
-
-
-        return f"{self.get_kind()}(ID: {str(self.id)})"
-
-    def __init__(self, schema: NodeSchema | ProfileSchema | TemplateSchema, branch: Branch, at: Timestamp):
-        self._schema: NodeSchema | ProfileSchema | TemplateSchema = schema
-        self._branch: Branch = branch
-        self._at: Timestamp = at
-        self._existing: bool = False
-
-        self._updated_at: Timestamp | None = None
-        self.id: str = None
-        self.db_id: str = None
-
-        self._source: Node | None = None
-        self._owner: Node | None = None
-        self._is_protected: bool = None
-        self._computed_jinja2_attributes: list[str] = []
-
-        # Lists of attributes and relationships names
-        self._attributes: list[str] = []
-        self._relationships: list[str] = []
-        self._node_changelog: NodeChangelog | None = None
+        v = f"{self.get_kind()}(ID: {str(self.id)})"
+        return v if self._existing else f"{v}[NEW]"
 
     @property
     def node_changelog(self) -> NodeChangelog:
@@ -278,7 +314,9 @@
 
         return cls(**attrs)
 
-    async def handle_pool(
+    async def handle_pool(
+        self, db: InfrahubDatabase, attribute: BaseAttribute, errors: list, allocate_resources: bool = True
+    ) -> None:
         """Evaluate if a resource has been requested from a pool and apply the resource
 
         This method only works on number pools, currently Integer is the only type that has the from_pool
@@ -289,7 +327,7 @@
             attribute.from_pool = {"id": attribute.schema.parameters.number_pool_id}
             attribute.is_default = False
 
-        if not attribute.from_pool:
+        if not attribute.from_pool or not allocate_resources:
             return
 
         try:
@@ -449,7 +487,7 @@
             elif relationship_peers := await relationship.get_peers(db=db):
                 fields[relationship_name] = [{"id": peer_id} for peer_id in relationship_peers]
 
-    async def _process_fields(self, fields: dict, db: InfrahubDatabase) -> None:
+    async def _process_fields(self, fields: dict, db: InfrahubDatabase, process_pools: bool = True) -> None:
         errors = []
 
         if "_source" in fields.keys():
@@ -503,7 +541,7 @@
         # Generate Attribute and Relationship and assign them
         # -------------------------------------------
         errors.extend(await self._process_fields_relationships(fields=fields, db=db))
-        errors.extend(await self._process_fields_attributes(fields=fields, db=db))
+        errors.extend(await self._process_fields_attributes(fields=fields, db=db, process_pools=process_pools))
 
         if errors:
             raise ValidationError(errors)
@@ -540,7 +578,9 @@
 
         return errors
 
-    async def _process_fields_attributes(
+    async def _process_fields_attributes(
+        self, fields: dict, db: InfrahubDatabase, process_pools: bool
+    ) -> list[ValidationError]:
         errors: list[ValidationError] = []
 
         for attr_schema in self._schema.attributes:
@@ -565,9 +605,10 @@
                 )
                 if not self._existing:
                     attribute: BaseAttribute = getattr(self, attr_schema.name)
-                    await self.handle_pool(db=db, attribute=attribute, errors=errors)
+                    await self.handle_pool(db=db, attribute=attribute, errors=errors, allocate_resources=process_pools)
 
-
+                if process_pools or attribute.from_pool is None:
+                    attribute.validate(value=attribute.value, name=attribute.name, schema=attribute.schema)
             except ValidationError as exc:
                 errors.append(exc)
 
@@ -695,7 +736,7 @@
             self.label.value = " ".join([word.title() for word in self.name.value.split("_")])
             self.label.is_default = False
 
-    async def new(self, db: InfrahubDatabase, id: str | None = None, **kwargs: Any) -> Self:
+    async def new(self, db: InfrahubDatabase, id: str | None = None, process_pools: bool = True, **kwargs: Any) -> Self:
         if id and not is_valid_uuid(id):
             raise ValidationError({"id": f"{id} is not a valid UUID"})
         if id:
@@ -705,7 +746,7 @@
 
         self.id = id or str(UUIDT())
 
-        await self._process_fields(db=db, fields=kwargs)
+        await self._process_fields(db=db, fields=kwargs, process_pools=process_pools)
         await self._process_macros(db=db)
 
         return self
@@ -733,12 +774,26 @@
         )
         self._updated_at = Timestamp(updated_at)
 
+        if not self._schema.is_schema_node:
+            if hfid := kwargs.pop("human_friendly_id", None):
+                self._human_friendly_id = HumanFriendlyIdentifier(
+                    node_schema=self._schema, template=self._schema.human_friendly_id, value=hfid
+                )
+            if display_label := kwargs.pop("display_label", None):
+                self._display_label = DisplayLabel(
+                    node_schema=self._schema, template=self._schema.display_label, value=display_label
+                )
+
         await self._process_fields(db=db, fields=kwargs)
         return self
 
     async def _create(self, db: InfrahubDatabase, at: Timestamp | None = None) -> NodeChangelog:
         create_at = Timestamp(at)
 
+        if not self._schema.is_schema_node:
+            await self.add_human_friendly_id(db=db)
+            await self.add_display_label(db=db)
+
         query = await NodeCreateAllQuery.init(db=db, node=self, at=create_at)
         await query.execute(db=db)
 
@@ -750,6 +805,13 @@
         new_ids = query.get_ids()
         node_changelog = NodeChangelog(node_id=self.get_id(), node_kind=self.get_kind(), display_label="")
 
+        if self._human_friendly_id:
+            node_changelog.create_attribute(
+                attribute=self._human_friendly_id.get_node_attribute(node=self, at=create_at)
+            )
+        if self._display_label:
+            node_changelog.create_attribute(attribute=self._display_label.get_node_attribute(node=self, at=create_at))
+
         # Go over the list of Attribute and assign the new IDs one by one
         for name in self._attributes:
             attr: BaseAttribute = getattr(self, name)
@@ -762,12 +824,10 @@
             relm: RelationshipManager = getattr(self, name)
             for rel in relm._relationships:
                 identifier = f"{rel.schema.identifier}::{rel.peer_id}"
-
                 rel.id, rel.db_id = new_ids[identifier]
-
                 node_changelog.create_relationship(relationship=rel)
 
-        node_changelog.display_label = await self.
+        node_changelog.display_label = await self.get_display_label(db=db)
         return node_changelog
 
     async def _update(
@@ -803,19 +863,41 @@
             if parent := await rel.get_parent(db=db):
                 node_changelog.add_parent_from_relationship(parent=parent)
 
-
+        # Update the HFID if one of its variables is being updated
+        if self._human_friendly_id and (
+            (fields and "human_friendly_id" in fields) or self._human_friendly_id.needs_update(fields=fields)
+        ):
+            await self._human_friendly_id.compute(db=db, node=self)
+            updated_attribute = await self._human_friendly_id.get_node_attribute(node=self, at=update_at).save(
+                at=update_at, db=db
+            )
+            if updated_attribute:
+                node_changelog.add_attribute(attribute=updated_attribute)
+
+        # Update the display label if one of its variables is being updated
+        if self._display_label and (
+            (fields and "display_label" in fields) or self._display_label.needs_update(fields=fields)
+        ):
+            await self._display_label.compute(db=db, node=self)
+            self._display_label.get_node_attribute(node=self, at=update_at).get_create_data(node_schema=self._schema)
+            updated_attribute = await self._display_label.get_node_attribute(node=self, at=update_at).save(
+                at=update_at, db=db
+            )
+            if updated_attribute:
+                node_changelog.add_attribute(attribute=updated_attribute)
+
+        node_changelog.display_label = await self.get_display_label(db=db)
         return node_changelog
 
     async def save(self, db: InfrahubDatabase, at: Timestamp | None = None, fields: list[str] | None = None) -> Self:
         """Create or Update the Node in the database."""
-
         save_at = Timestamp(at)
 
         if self._existing:
             self._node_changelog = await self._update(at=save_at, db=db, fields=fields)
-
+        else:
+            self._node_changelog = await self._create(at=save_at, db=db)
 
-        self._node_changelog = await self._create(at=save_at, db=db)
         return self
 
     async def delete(self, db: InfrahubDatabase, at: Timestamp | None = None) -> None:
@@ -824,13 +906,24 @@
         delete_at = Timestamp(at)
 
         node_changelog = NodeChangelog(
-            node_id=self.get_id(), node_kind=self.get_kind(), display_label=await self.
+            node_id=self.get_id(), node_kind=self.get_kind(), display_label=await self.get_display_label(db=db)
        )
         # Go over the list of Attribute and update them one by one
         for name in self._attributes:
             attr: BaseAttribute = getattr(self, name)
-            deleted_attribute
-
+            if deleted_attribute := await attr.delete(at=delete_at, db=db):
+                node_changelog.add_attribute(attribute=deleted_attribute)
+
+        if self._human_friendly_id:
+            if deleted_attribute := await self._human_friendly_id.get_node_attribute(node=self, at=delete_at).delete(
+                at=delete_at, db=db
+            ):
+                node_changelog.add_attribute(attribute=deleted_attribute)
+
+        if self._display_label:
+            if deleted_attribute := await self._display_label.get_node_attribute(node=self, at=delete_at).delete(
+                at=delete_at, db=db
+            ):
                 node_changelog.add_attribute(attribute=deleted_attribute)
 
         branch = self.get_branch_based_on_support_type()
@@ -898,7 +991,7 @@
                 continue
 
             if field_name == "display_label":
-                response[field_name] = await self.
+                response[field_name] = await self.get_display_label(db=db)
                 continue
 
             if field_name == "hfid":
@@ -958,7 +1051,7 @@
 
         return response
 
-    async def from_graphql(self, data: dict, db: InfrahubDatabase) -> bool:
+    async def from_graphql(self, data: dict, db: InfrahubDatabase, process_pools: bool = True) -> bool:
         """Update object from a GraphQL payload."""
 
         changed = False
@@ -966,7 +1059,7 @@
         for key, value in data.items():
             if key in self._attributes and isinstance(value, dict):
                 attribute = getattr(self, key)
-                changed |= await attribute.from_graphql(data=value, db=db)
+                changed |= await attribute.from_graphql(data=value, db=db, process_pools=process_pools)
 
             if key in self._relationships:
                 rel: RelationshipManager = getattr(self, key)
@@ -1001,6 +1094,20 @@
             return repr(self)
         return display_label.strip()
 
+    async def set_human_friendly_id(self, value: list[str] | None) -> None:
+        """Set the human friendly ID of this node if one is set. `save()` must be called to commit the change in the database."""
+        if self._human_friendly_id is None:
+            return
+
+        self._human_friendly_id.set_value(value=value, manually_assigned=True)
+
+    async def set_display_label(self, value: str | None) -> None:
+        """Set the display label of this node if one is set. `save()` must be called to commit the change in the database."""
+        if self._display_label is None:
+            return
+
+        self._display_label.set_value(value=value, manually_assigned=True)
+
     def _get_parent_relationship_name(self) -> str | None:
         """Return the name of the parent relationship is one is present"""
         for relationship in self._schema.relationships:
@@ -1010,7 +1117,7 @@
         return None
 
     async def get_object_template(self, db: InfrahubDatabase) -> CoreObjectTemplate | None:
-        object_template: RelationshipManager = getattr(self, OBJECT_TEMPLATE_RELATIONSHIP_NAME, None)
+        object_template: RelationshipManager | None = getattr(self, OBJECT_TEMPLATE_RELATIONSHIP_NAME, None)
         return (
             await object_template.get_peer(db=db, peer_type=CoreObjectTemplate) if object_template is not None else None
         )
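
Taken together, the Node changes above turn display labels and human-friendly IDs into dedicated node properties (DisplayLabel, HumanFriendlyIdentifier) that are computed on create, recomputed when their source fields change, recorded in the changelog, and exposed through get_display_label, set_display_label, and set_human_friendly_id. A rough usage sketch follows, assuming db is an open InfrahubDatabase and node is a Node already loaded from the database so its properties are populated; the values are hypothetical:

async def rename_device(db, node):
    # Stored display label, falling back to render_display_label() when no
    # computed value exists yet.
    if node.has_display_label():
        print(await node.get_display_label(db=db))

    # Manual overrides; they only take effect if the schema defines the
    # corresponding property, and save() persists them.
    await node.set_display_label(value="Edge Router 01")
    await node.set_human_friendly_id(value=["edge-router-01"])
    await node.save(db=db)
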
infrahub/core/node/constraints/attribute_uniqueness.py
CHANGED
@@ -29,7 +29,9 @@ class NodeAttributeUniquenessConstraint(NodeConstraintInterface):
             attr = getattr(node, unique_attr.name)
             if unique_attr.inherited:
                 for generic_parent_schema_name in node_schema.inherit_from:
-                    generic_parent_schema = self.db.schema.get(
+                    generic_parent_schema = self.db.schema.get(
+                        generic_parent_schema_name, branch=self.branch, duplicate=False
+                    )
                     parent_attr = generic_parent_schema.get_attribute_or_none(unique_attr.name)
                     if parent_attr is None:
                         continue
infrahub/core/node/create.py
CHANGED
@@ -7,7 +7,7 @@ from infrahub.core import registry
 from infrahub.core.constants import RelationshipCardinality, RelationshipKind
 from infrahub.core.constraint.node.runner import NodeConstraintRunner
 from infrahub.core.node import Node
-from infrahub.core.node.lock_utils import
+from infrahub.core.node.lock_utils import get_lock_names_on_object_mutation
 from infrahub.core.protocols import CoreObjectTemplate
 from infrahub.core.schema import GenericSchema
 from infrahub.dependencies.registry import get_component_registry
@@ -62,14 +62,25 @@ async def extract_peer_data(
 
     for rel in template_peer.get_schema().relationship_names:
         rel_manager: RelationshipManager = getattr(template_peer, rel)
-
-
-            or rel_manager.schema.name not in obj_peer_schema.relationship_names
-        ):
+
+        if rel_manager.schema.name not in obj_peer_schema.relationship_names:
             continue
 
-
+        peers_map = await rel_manager.get_peers(db=db)
+        if rel_manager.schema.kind in [RelationshipKind.COMPONENT, RelationshipKind.PARENT] and list(
+            peers_map.keys()
+        ) == [current_template.id]:
            obj_peer_data[rel] = {"id": parent_obj.id}
+            continue
+
+        rel_peer_ids = []
+        for peer_id, peer_object in peers_map.items():
+            # deeper templates are handled in the next level of recursion
+            if peer_object.get_schema().is_template_schema:
+                continue
+            rel_peer_ids.append({"id": peer_id})
+
+        obj_peer_data[rel] = rel_peer_ids
 
     return obj_peer_data
 
@@ -160,45 +171,6 @@ async def _do_create_node(
     return obj
 
 
-async def _do_create_node_with_lock(
-    node_class: type[Node],
-    node_constraint_runner: NodeConstraintRunner,
-    db: InfrahubDatabase,
-    schema: NonGenericSchemaTypes,
-    branch: Branch,
-    fields_to_validate: list[str],
-    data: dict[str, Any],
-    at: Timestamp | None = None,
-) -> Node:
-    schema_branch = registry.schema.get_schema_branch(name=branch.name)
-    lock_names = get_kind_lock_names_on_object_mutation(
-        kind=schema.kind, branch=branch, schema_branch=schema_branch, data=dict(data)
-    )
-
-    if lock_names:
-        async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names):
-            return await _do_create_node(
-                node_class=node_class,
-                node_constraint_runner=node_constraint_runner,
-                db=db,
-                schema=schema,
-                branch=branch,
-                fields_to_validate=fields_to_validate,
-                data=data,
-                at=at,
-            )
-    return await _do_create_node(
-        node_class=node_class,
-        node_constraint_runner=node_constraint_runner,
-        db=db,
-        schema=schema,
-        branch=branch,
-        fields_to_validate=fields_to_validate,
-        data=data,
-        at=at,
-    )
-
-
 async def create_node(
     data: dict[str, Any],
     db: InfrahubDatabase,
@@ -212,37 +184,48 @@ async def create_node(
         raise ValueError(f"Node of generic schema `{schema.name=}` can not be instantiated.")
 
     component_registry = get_component_registry()
-    node_constraint_runner = await component_registry.get_component(
-        NodeConstraintRunner, db=db.start_session() if not db.is_transaction else db, branch=branch
-    )
     node_class = Node
     if schema.kind in registry.node:
         node_class = registry.node[schema.kind]
 
     fields_to_validate = list(data)
-
-
-
-
-
-
-
-
-
-
-
-
-    async with db.start_transaction() as dbt:
-        obj = await _do_create_node_with_lock(
+
+    preview_obj = await node_class.init(db=db, schema=schema, branch=branch)
+    await preview_obj.new(db=db, process_pools=False, **data)
+    schema_branch = db.schema.get_schema_branch(name=branch.name)
+    lock_names = get_lock_names_on_object_mutation(node=preview_obj, schema_branch=schema_branch)
+
+    obj: Node
+    async with InfrahubMultiLock(lock_registry=lock.registry, locks=lock_names, metrics=False):
+        if db.is_transaction:
+            node_constraint_runner = await component_registry.get_component(NodeConstraintRunner, db=db, branch=branch)
+
+            obj = await _do_create_node(
                 node_class=node_class,
                 node_constraint_runner=node_constraint_runner,
-                db=
+                db=db,
                 schema=schema,
                 branch=branch,
                 fields_to_validate=fields_to_validate,
                 data=data,
                 at=at,
             )
+        else:
+            async with db.start_transaction() as dbt:
+                node_constraint_runner = await component_registry.get_component(
+                    NodeConstraintRunner, db=dbt, branch=branch
+                )
+
+                obj = await _do_create_node(
+                    node_class=node_class,
+                    node_constraint_runner=node_constraint_runner,
+                    db=dbt,
+                    schema=schema,
+                    branch=branch,
+                    fields_to_validate=fields_to_validate,
+                    data=data,
+                    at=at,
+                )
 
     if await get_profile_ids(db=db, obj=obj):
         node_profiles_applier = NodeProfilesApplier(db=db, branch=branch)