infrahub-server 1.2.1__py3-none-any.whl → 1.2.3__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
- infrahub/computed_attribute/tasks.py +71 -67
- infrahub/config.py +3 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/migrations/graph/__init__.py +4 -1
- infrahub/core/migrations/graph/m024_missing_hierarchy_backfill.py +69 -0
- infrahub/core/models.py +6 -0
- infrahub/core/node/__init__.py +4 -4
- infrahub/core/node/constraints/grouped_uniqueness.py +24 -9
- infrahub/core/query/ipam.py +1 -1
- infrahub/core/query/node.py +16 -5
- infrahub/core/schema/schema_branch.py +14 -5
- infrahub/exceptions.py +30 -2
- infrahub/git/base.py +80 -29
- infrahub/git/integrator.py +9 -31
- infrahub/menu/repository.py +6 -6
- infrahub/trigger/tasks.py +19 -18
- infrahub/workflows/utils.py +5 -5
- infrahub_sdk/client.py +6 -6
- infrahub_sdk/ctl/cli_commands.py +32 -37
- infrahub_sdk/ctl/render.py +39 -0
- infrahub_sdk/exceptions.py +6 -2
- infrahub_sdk/generator.py +1 -1
- infrahub_sdk/node.py +41 -12
- infrahub_sdk/protocols_base.py +8 -1
- infrahub_sdk/pytest_plugin/items/jinja2_transform.py +22 -26
- infrahub_sdk/store.py +351 -75
- infrahub_sdk/template/__init__.py +209 -0
- infrahub_sdk/template/exceptions.py +38 -0
- infrahub_sdk/template/filters.py +151 -0
- infrahub_sdk/template/models.py +10 -0
- infrahub_sdk/utils.py +7 -0
- {infrahub_server-1.2.1.dist-info → infrahub_server-1.2.3.dist-info}/METADATA +2 -1
- {infrahub_server-1.2.1.dist-info → infrahub_server-1.2.3.dist-info}/RECORD +39 -36
- infrahub_testcontainers/container.py +2 -0
- infrahub_testcontainers/docker-compose.test.yml +1 -0
- infrahub_testcontainers/haproxy.cfg +3 -3
- infrahub/support/__init__.py +0 -0
- infrahub/support/macro.py +0 -69
- {infrahub_server-1.2.1.dist-info → infrahub_server-1.2.3.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.1.dist-info → infrahub_server-1.2.3.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.1.dist-info → infrahub_server-1.2.3.dist-info}/entry_points.txt +0 -0
infrahub/computed_attribute/tasks.py
CHANGED
@@ -6,6 +6,7 @@ from infrahub_sdk.protocols import (
     CoreNode,  # noqa: TC002
     CoreTransformPython,
 )
+from infrahub_sdk.template import Jinja2Template
 from prefect import flow
 from prefect.client.orchestration import get_client
 from prefect.logging import get_run_logger
@@ -16,7 +17,6 @@ from infrahub.core.registry import registry
 from infrahub.events import BranchDeletedEvent
 from infrahub.git.repository import get_initialized_repo
 from infrahub.services import InfrahubServices  # noqa: TC001 needed for prefect flow
-from infrahub.support.macro import MacroDefinition
 from infrahub.trigger.models import TriggerType
 from infrahub.trigger.setup import setup_triggers
 from infrahub.workflows.catalogue import (
@@ -173,15 +173,15 @@ async def update_computed_attribute_value_jinja2(
 
     await add_tags(branches=[branch_name], nodes=[obj.id], db_change=True)
 
-    …
-    …
-    for variable in …
+    jinja_template = Jinja2Template(template=template_value)
+    variables = {}
+    for variable in jinja_template.get_variables():
         components = variable.split("__")
         if len(components) == 2:
             property_name = components[0]
             property_value = components[1]
             attribute_property = getattr(obj, property_name)
-            …
+            variables[variable] = getattr(attribute_property, property_value)
         elif len(components) == 3:
             relationship_name = components[0]
             property_name = components[1]
@@ -189,11 +189,11 @@ async def update_computed_attribute_value_jinja2(
             relationship = getattr(obj, relationship_name)
             try:
                 attribute_property = getattr(relationship.peer, property_name)
-                …
+                variables[variable] = getattr(attribute_property, property_value)
             except ValueError:
-                …
+                variables[variable] = ""
 
-    value = …
+    value = await jinja_template.render(variables=variables)
     existing_value = getattr(obj, attribute_name).value
     if value == existing_value:
         log.debug(f"Ignoring to update {obj} with existing value on {attribute_name}={value}")
@@ -311,35 +311,36 @@ async def trigger_update_jinja2_computed_attributes(
 async def computed_attribute_setup_jinja2(
     service: InfrahubServices, context: InfrahubContext, branch_name: str | None = None, event_name: str | None = None
 ) -> None:
-    …
+    async with service.database.start_session() as db:
+        log = get_run_logger()
 
-    …
-    …
-    …
-    …
-    triggers = await gather_trigger_computed_attribute_jinja2()
-
-    for trigger in triggers:
-        if event_name != BranchDeletedEvent.event_name and trigger.branch == branch_name:
-            await service.workflow.submit_workflow(
-                workflow=TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
-                context=context,
-                parameters={
-                    "branch_name": trigger.branch,
-                    "computed_attribute_name": trigger.computed_attribute.attribute.name,
-                    "computed_attribute_kind": trigger.computed_attribute.kind,
-                },
-            )
+        if branch_name:
+            await add_tags(branches=[branch_name])
+            await wait_for_schema_to_converge(branch_name=branch_name, component=service.component, db=db, log=log)
 
-    …
-    …
-    …
-    …
-    …
-    …
-    …
+        triggers = await gather_trigger_computed_attribute_jinja2()
+
+        for trigger in triggers:
+            if event_name != BranchDeletedEvent.event_name and trigger.branch == branch_name:
+                await service.workflow.submit_workflow(
+                    workflow=TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
+                    context=context,
+                    parameters={
+                        "branch_name": trigger.branch,
+                        "computed_attribute_name": trigger.computed_attribute.attribute.name,
+                        "computed_attribute_kind": trigger.computed_attribute.kind,
+                    },
+                )
 
-    …
+        # Configure all ComputedAttrJinja2Trigger in Prefect
+        async with get_client(sync_client=False) as prefect_client:
+            await setup_triggers(
+                client=prefect_client,
+                triggers=triggers,
+                trigger_type=TriggerType.COMPUTED_ATTR_JINJA2,
+            )  # type: ignore[misc]
+
+        log.info(f"{len(triggers)} Computed Attribute for Jinja2 automation configuration completed")
 
 
 @flow(
@@ -353,46 +354,49 @@ async def computed_attribute_setup_python(
     event_name: str | None = None,
     commit: str | None = None,  # noqa: ARG001
 ) -> None:
-    …
+    async with service.database.start_session() as db:
+        log = get_run_logger()
+
+        branch_name = branch_name or registry.default_branch
 
-    …
+        if branch_name:
+            await add_tags(branches=[branch_name])
+            await wait_for_schema_to_converge(branch_name=branch_name, component=service.component, db=db, log=log)
 
-    …
-    await add_tags(branches=[branch_name])
-    await wait_for_schema_to_converge(branch_name=branch_name, service=service, log=log)
+        triggers_python, triggers_python_query = await gather_trigger_computed_attribute_python(db=db)
 
-    …
+        for trigger in triggers_python:
+            if event_name != BranchDeletedEvent.event_name and trigger.branch == branch_name:
+                log.info(
+                    f"Triggering update for {trigger.computed_attribute.computed_attribute.attribute.name} on {branch_name}"
+                )
+                await service.workflow.submit_workflow(
+                    workflow=TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
+                    context=context,
+                    parameters={
+                        "branch_name": branch_name,
+                        "computed_attribute_name": trigger.computed_attribute.computed_attribute.attribute.name,
+                        "computed_attribute_kind": trigger.computed_attribute.computed_attribute.kind,
+                    },
+                )
 
-    …
-    …
+        async with get_client(sync_client=False) as prefect_client:
+            await setup_triggers(
+                client=prefect_client,
+                triggers=triggers_python,
+                trigger_type=TriggerType.COMPUTED_ATTR_PYTHON,
+            )  # type: ignore[misc]
+            log.info(f"{len(triggers_python)} Computed Attribute for Python automation configuration completed")
+
+            await setup_triggers(
+                client=prefect_client,
+                triggers=triggers_python_query,
+                trigger_type=TriggerType.COMPUTED_ATTR_PYTHON_QUERY,
+            )  # type: ignore[misc]
         log.info(
-            f"…
-        )
-        await service.workflow.submit_workflow(
-            workflow=TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
-            context=context,
-            parameters={
-                "branch_name": branch_name,
-                "computed_attribute_name": trigger.computed_attribute.computed_attribute.attribute.name,
-                "computed_attribute_kind": trigger.computed_attribute.computed_attribute.kind,
-            },
+            f"{len(triggers_python_query)} Computed Attribute for Python Query automation configuration completed"
         )
 
-    async with get_client(sync_client=False) as prefect_client:
-        await setup_triggers(
-            client=prefect_client,
-            triggers=triggers_python,
-            trigger_type=TriggerType.COMPUTED_ATTR_PYTHON,
-        )  # type: ignore[misc]
-        log.info(f"{len(triggers_python)} Computed Attribute for Python automation configuration completed")
-
-        await setup_triggers(
-            client=prefect_client,
-            triggers=triggers_python_query,
-            trigger_type=TriggerType.COMPUTED_ATTR_PYTHON_QUERY,
-        )  # type: ignore[misc]
-        log.info(f"{len(triggers_python_query)} Computed Attribute for Python Query automation configuration completed")
-
 
 @flow(
     name="query-computed-attribute-transform-targets",
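The change above replaces the old MacroDefinition helper (infrahub/support/macro.py, removed in this release) with the new infrahub_sdk.template.Jinja2Template class, whose get_variables() and async render() calls appear verbatim in the hunks. Template variables follow a double-underscore convention: attribute__property (e.g. name__value) or relationship__attribute__property. The Jinja2Template class itself is not shown in this diff, but its variable-extraction step can be approximated with stock Jinja2; a minimal sketch, assuming the template only uses plain undeclared variables (the example names are illustrative):

from jinja2 import Environment, meta

def get_template_variables(template: str) -> set[str]:
    # Parse the template and collect every undeclared variable name,
    # roughly what Jinja2Template.get_variables() appears to return.
    env = Environment()
    ast = env.parse(template)
    return meta.find_undeclared_variables(ast)

print(get_template_variables("{{ name__value }}-{{ site__name__value }}"))
# {'name__value', 'site__name__value'}  (set order may vary)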
infrahub/config.py
CHANGED
@@ -612,6 +612,9 @@ class SecuritySettings(BaseSettings):
     oauth2_provider_settings: SecurityOAuth2ProviderSettings = Field(default_factory=SecurityOAuth2ProviderSettings)
     oidc_providers: list[OIDCProvider] = Field(default_factory=list, description="The selected OIDC providers")
     oidc_provider_settings: SecurityOIDCProviderSettings = Field(default_factory=SecurityOIDCProviderSettings)
+    restrict_untrusted_jinja2_filters: bool = Field(
+        default=True, description="Indicates if untrusted Jinja2 filters should be disallowd for computed attributes"
+    )
     _oauth2_settings: dict[str, SecurityOAuth2Settings] = PrivateAttr(default_factory=dict)
     _oidc_settings: dict[str, SecurityOIDCSettings] = PrivateAttr(default_factory=dict)
     sso_user_default_group: str | None = Field(
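The new restrict_untrusted_jinja2_filters flag defaults to True and is consumed further down in this diff by schema_branch.py, which passes it to Jinja2Template.validate(). A minimal sketch of how the setting is read (config.SETTINGS is the existing settings singleton; the call appears verbatim in the schema_branch.py hunk below):

from infrahub import config

# Defaults to True: untrusted Jinja2 filters are rejected in computed-attribute templates.
if config.SETTINGS.security.restrict_untrusted_jinja2_filters:
    ...  # validate templates with the restricted filter set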
infrahub/core/graph/__init__.py
CHANGED
@@ -1 +1 @@
-GRAPH_VERSION = …
+GRAPH_VERSION = 24
infrahub/core/migrations/graph/__init__.py
CHANGED
@@ -24,6 +24,8 @@ from .m019_restore_rels_to_time import Migration019
 from .m020_duplicate_edges import Migration020
 from .m021_missing_hierarchy_merge import Migration021
 from .m022_add_generate_template_attr import Migration022
+from .m023_deduplicate_cardinality_one_relationships import Migration023
+from .m024_missing_hierarchy_backfill import Migration024
 
 if TYPE_CHECKING:
     from infrahub.core.root import Root
@@ -53,7 +55,8 @@ MIGRATIONS: list[type[GraphMigration | InternalSchemaMigration | ArbitraryMigrat
     Migration020,
     Migration021,
     Migration022,
-    …
+    Migration023,
+    Migration024,
 ]
 
 
infrahub/core/migrations/graph/m024_missing_hierarchy_backfill.py
ADDED
@@ -0,0 +1,69 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Sequence
+
+from infrahub.core import registry
+from infrahub.core.initialization import initialization
+from infrahub.core.migrations.shared import GraphMigration, MigrationResult
+from infrahub.lock import initialize_lock
+from infrahub.log import get_logger
+
+from ...query import Query, QueryType
+
+if TYPE_CHECKING:
+    from infrahub.database import InfrahubDatabase
+
+log = get_logger()
+
+
+class BackfillMissingHierarchyQuery(Query):
+    name = "backfill_missing_hierarchy"
+    type = QueryType.WRITE
+    insert_return = False
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        # load schemas from database into registry
+        initialize_lock()
+        await initialization(db=db)
+        kind_hierarchy_map: dict[str, str] = {}
+        schema_branch = await registry.schema.load_schema_from_db(db=db)
+        for node_schema_kind in schema_branch.node_names:
+            node_schema = schema_branch.get_node(name=node_schema_kind, duplicate=False)
+            if node_schema.hierarchy:
+                kind_hierarchy_map[node_schema.kind] = node_schema.hierarchy
+
+        self.params = {"hierarchy_map": kind_hierarchy_map}
+        query = """
+        MATCH (r:Root)
+        WITH r.default_branch AS default_branch
+        MATCH (rel:Relationship {name: "parent__child"})-[e:IS_RELATED]-(n:Node)
+        WHERE e.hierarchy IS NULL
+        WITH DISTINCT rel, n, default_branch
+        CALL {
+            WITH rel, n, default_branch
+            MATCH (rel)-[e:IS_RELATED {branch: default_branch}]-(n)
+            RETURN e
+            ORDER BY e.from DESC
+            LIMIT 1
+        }
+        WITH rel, n, e
+        WHERE e.status = "active" AND e.hierarchy IS NULL
+        SET e.hierarchy = $hierarchy_map[n.kind]
+        """
+        self.add_to_query(query)
+
+
+class Migration024(GraphMigration):
+    """
+    A bug in diff merge logic caused the hierarchy information on IS_RELATED edges to be lost when merged into
+    main. This migration backfills the missing hierarchy data and accounts for the case when the branch that
+    created the data has been deleted.
+    """
+
+    name: str = "024_backfill_hierarchy"
+    minimum_version: int = 23
+    queries: Sequence[type[Query]] = [BackfillMissingHierarchyQuery]
+
+    async def validate_migration(self, db: InfrahubDatabase) -> MigrationResult:  # noqa: ARG002
+        result = MigrationResult()
+        return result
infrahub/core/models.py
CHANGED
@@ -19,6 +19,8 @@ if TYPE_CHECKING:
     from infrahub.core.schema import MainSchemaTypes
     from infrahub.core.schema.schema_branch import SchemaBranch
 
+GENERIC_ATTRIBUTES_TO_IGNORE = ["namespace", "name", "branch"]
+
 
 class NodeKind(BaseModel):
     namespace: str
@@ -270,6 +272,10 @@ class SchemaUpdateValidationResult(BaseModel):
         field_info = schema.model_fields[node_field_name]
         field_update = str(field_info.json_schema_extra.get("update"))  # type: ignore[union-attr]
 
+        # No need to execute a migration for generic nodes attributes because they are not stored in the database
+        if schema.is_generic_schema and node_field_name in GENERIC_ATTRIBUTES_TO_IGNORE:
+            return
+
         schema_path = SchemaPath(  # type: ignore[call-arg]
             schema_kind=schema.kind,
             path_type=SchemaPathType.NODE,
infrahub/core/node/__init__.py
CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 from enum import Enum
 from typing import TYPE_CHECKING, Any, Sequence, TypeVar, overload
 
+from infrahub_sdk.template import Jinja2Template
 from infrahub_sdk.utils import is_valid_uuid
 from infrahub_sdk.uuidt import UUIDT
 
@@ -24,7 +25,6 @@ from infrahub.core.query.node import NodeCheckIDQuery, NodeCreateAllQuery, NodeD
 from infrahub.core.schema import AttributeSchema, NodeSchema, ProfileSchema, RelationshipSchema, TemplateSchema
 from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import InitializationError, NodeNotFoundError, PoolExhaustedError, ValidationError
-from infrahub.support.macro import MacroDefinition
 from infrahub.types import ATTRIBUTE_TYPES
 
 from ...graphql.constants import KIND_GRAPHQL_FIELD_NAME
@@ -458,9 +458,9 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
                     ValidationError({macro: f"{macro} is missing computational_logic for macro ({attr_schema.kind})"})
                 )
                 continue
-            macro_definition = MacroDefinition(macro=attr_schema.computed_attribute.jinja2_template)
 
-            …
+            jinja_template = Jinja2Template(template=attr_schema.computed_attribute.jinja2_template)
+            for variable in jinja_template.get_variables():
                 attribute_path = schema_branch.validate_schema_path(
                     node_schema=self._schema, path=variable, allowed_path_types=allowed_path_types
                 )
@@ -487,7 +487,7 @@ class Node(BaseNode, metaclass=BaseNodeMeta):
             )
             variables[variable] = attribute
 
-            content = …
+            content = await jinja_template.render(variables=variables)
 
             generator_method_name = "_generate_attribute_default"
             if hasattr(self, f"generate_{attr_schema.name}"):
infrahub/core/node/constraints/grouped_uniqueness.py
CHANGED
@@ -225,16 +225,31 @@ class NodeGroupedUniquenessConstraint(NodeConstraintInterface):
             )
             violations.extend(schema_violations)
 
-        …
+        hfid_violations = [violation for violation in violations if violation.typ == UniquenessConstraintType.HFID]
+        hfid_violation = hfid_violations[0] if len(hfid_violations) > 0 else None
 
-        …
-        …
-        …
-        ):
-            error_msg = f"Violates uniqueness constraint '{'-'.join(violation.fields)}'"
-            raise ValidationError(error_msg)
+        # If there are both a hfid violation and another one, in case of an upsert, we still want to update the node in case other violations are:
+        # - either on subset fields of hfid, which would be necessarily violated too
+        # - or on uniqueness constraints with a matching node id being the id of the hfid violation
 
         for violation in violations:
             if violation.typ == UniquenessConstraintType.HFID:
-                …
-                …
+                continue
+
+            if hfid_violation:
+                if violation.typ == UniquenessConstraintType.SUBSET_OF_HFID:
+                    continue
+
+                if (
+                    violation.typ == UniquenessConstraintType.STANDARD
+                    and len(violation.nodes_ids) == 1
+                    and next(iter(violation.nodes_ids)) == next(iter(hfid_violation.nodes_ids))
+                ):
+                    continue
+
+            error_msg = f"Violates uniqueness constraint '{'-'.join(violation.fields)}'"
+            raise ValidationError(error_msg)
+
+        if hfid_violation:
+            error_msg = f"Violates uniqueness constraint '{'-'.join(hfid_violation.fields)}'"
+            raise HFIDViolatedError(error_msg, matching_nodes_ids=hfid_violation.nodes_ids)
infrahub/core/query/ipam.py
CHANGED
@@ -362,7 +362,7 @@ class IPPrefixReconcileQuery(Query):
         # possible prefix: highest possible prefix length for a match
         possible_prefix_map: dict[str, int] = {}
         start_prefixlen = prefixlen if is_address else prefixlen - 1
-        for max_prefix_len in range(start_prefixlen, …
+        for max_prefix_len in range(start_prefixlen, -1, -1):
             tmp_prefix = prefix_bin_host[:max_prefix_len]
             possible_prefix = tmp_prefix.ljust(self.ip_value.max_prefixlen, "0")
             if possible_prefix not in possible_prefix_map:
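The corrected loop counts down from the starting prefix length to 0 inclusive (range's stop value of -1 is exclusive), so even the shortest candidate parent prefixes are considered; the truncated old line suggests the previous bound stopped short. A quick illustration:

start_prefixlen = 4  # illustrative value, e.g. reconciling a /4
print(list(range(start_prefixlen, -1, -1)))
# [4, 3, 2, 1, 0] — includes prefix length 0, the all-zero default prefix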
infrahub/core/query/node.py
CHANGED
@@ -8,6 +8,7 @@ from enum import Enum
 from typing import TYPE_CHECKING, Any, AsyncIterator, Generator
 
 from infrahub import config
+from infrahub.core import registry
 from infrahub.core.constants import (
     AttributeDBNodeType,
     RelationshipDirection,
@@ -713,14 +714,24 @@ class NodeGetKindQuery(Query):
         super().__init__(**kwargs)
 
     async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:  # noqa: ARG002
+        self.params["ids"] = self.ids
         query = """
-        …
-        …
+        MATCH (n:Node)-[r:IS_PART_OF {status: "active"}]->(:Root)
+        WHERE toString(n.uuid) IN $ids
+        """
+        # only add the branch filter logic if a branch is included in the query parameters
+        if branch := getattr(self, "branch", None):
+            branch = await registry.get_branch(db=db, branch=branch)
+            branch_filter, branch_params = branch.get_query_filter_path(at=self.at)
+            self.params.update(branch_params)
+            query += f"AND {branch_filter}"
+        query += """
+        WITH n.uuid AS node_id, n.kind AS node_kind
+        ORDER BY r.from DESC
+        WITH node_id, head(collect(node_kind)) AS node_kind
         """
         self.add_to_query(query)
-        self.…
-        …
-        self.return_labels = ["n.uuid AS node_id", "n.kind AS node_kind"]
+        self.return_labels = ["node_id", "node_kind"]
 
     async def get_node_kind_map(self) -> dict[str, str]:
         node_kind_map: dict[str, str] = {}
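The rewritten query resolves each node's kind from the most recent active IS_PART_OF edge: rows are sorted by r.from descending and head(collect(...)) keeps the first kind per uuid, de-duplicating nodes that carry several edges. The same reduction in plain Python, over hypothetical result rows already ordered newest-first:

rows = [  # hypothetical (node_id, node_kind) rows, newest edge first
    ("0f6a-…", "InfraDevice"),
    ("0f6a-…", "InfraDevice"),
    ("9c1b-…", "IpamIPPrefix"),
]
node_kind_map: dict[str, str] = {}
for node_id, node_kind in rows:
    node_kind_map.setdefault(node_id, node_kind)  # first (newest) kind wins, like head(collect(...))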
infrahub/core/schema/schema_branch.py
CHANGED
@@ -6,6 +6,8 @@ from collections import defaultdict
 from itertools import chain, combinations
 from typing import Any
 
+from infrahub_sdk.template import Jinja2Template
+from infrahub_sdk.template.exceptions import JinjaTemplateError, JinjaTemplateOperationViolationError
 from infrahub_sdk.topological_sort import DependencyCycleExistsError, topological_sort
 from infrahub_sdk.utils import compare_lists, deep_merge_dict, duplicates, intersection
 from typing_extensions import Self
@@ -51,7 +53,6 @@ from infrahub.core.schema.definitions.core import core_profile_schema_definition
 from infrahub.core.validators import CONSTRAINT_VALIDATOR_MAP
 from infrahub.exceptions import SchemaNotFoundError, ValidationError
 from infrahub.log import get_logger
-from infrahub.support.macro import MacroDefinition
 from infrahub.types import ATTRIBUTE_TYPES
 from infrahub.utils import format_label
 from infrahub.visuals import select_color
@@ -1037,14 +1038,22 @@ class SchemaBranch:
             | SchemaElementPathType.REL_ONE_MANDATORY_ATTR_WITH_PROP
             | SchemaElementPathType.REL_ONE_ATTR_WITH_PROP
         )
+
+        jinja_template = Jinja2Template(template=attribute.computed_attribute.jinja2_template)
         try:
-            …
-            …
+            variables = jinja_template.get_variables()
+            jinja_template.validate(restricted=config.SETTINGS.security.restrict_untrusted_jinja2_filters)
+        except JinjaTemplateOperationViolationError as exc:
+            raise ValueError(
+                f"{node.kind}: Attribute {attribute.name!r} is assigned by a jinja2 template, but has an invalid template: {exc.message}"
+            ) from exc
+
+        except JinjaTemplateError as exc:
             raise ValueError(
-                f"{node.kind}: Attribute {attribute.name!r} is assigned by a jinja2 template, but has an invalid template"
+                f"{node.kind}: Attribute {attribute.name!r} is assigned by a jinja2 template, but has an invalid template: : {exc.message}"
            ) from exc
 
-        for variable in …
+        for variable in variables:
             try:
                 schema_path = self.validate_schema_path(
                     node_schema=node, path=variable, allowed_path_types=allowed_path_types
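Template validation now distinguishes a filter/operation violation (enforced when restrict_untrusted_jinja2_filters is enabled) from any other template error, and both exception types expose a message attribute that is folded into the ValueError. A minimal sketch of the validation call as used above; whether a given filter is allowed presumably depends on the allowlist in infrahub_sdk/template/filters.py, which this release adds (+151 lines) but this diff does not display:

from infrahub_sdk.template import Jinja2Template
from infrahub_sdk.template.exceptions import JinjaTemplateError, JinjaTemplateOperationViolationError

template = Jinja2Template(template="{{ name__value | upper }}")
try:
    template.validate(restricted=True)
except JinjaTemplateOperationViolationError as exc:
    print(f"disallowed operation: {exc.message}")
except JinjaTemplateError as exc:
    print(f"invalid template: {exc.message}")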
infrahub/exceptions.py
CHANGED
@@ -76,6 +76,32 @@ class RepositoryError(Error):
         super().__init__(self.message)
 
 
+class RepositoryConnectionError(RepositoryError):
+    def __init__(self, identifier: str, message: str | None = None) -> None:
+        super().__init__(
+            identifier=identifier,
+            message=message
+            or f"Unable to clone the repository {identifier}, please check the address and the credential",
+        )
+
+
+class RepositoryCredentialsError(RepositoryError):
+    def __init__(self, identifier: str, message: str | None = None) -> None:
+        super().__init__(
+            identifier=identifier,
+            message=message or f"Authentication failed for {identifier}, please validate the credentials.",
+        )
+
+
+class RepositoryInvalidBranchError(RepositoryError):
+    def __init__(self, identifier: str, branch_name: str, location: str, message: str | None = None) -> None:
+        super().__init__(
+            identifier=identifier,
+            message=message
+            or f"The branch {branch_name} isn't a valid branch for the repository {identifier} at {location}.",
+        )
+
+
 class RepositoryInvalidFileSystemError(RepositoryError):
     def __init__(
         self,
@@ -83,9 +109,11 @@ class RepositoryInvalidFileSystemError(RepositoryError):
         directory: Path,
         message: str | None = None,
     ) -> None:
-        super().__init__(…
+        super().__init__(
+            identifier=identifier,
+            message=message or f"Invalid file system for {identifier}, Local directory {directory} missing.",
+        )
         self.directory = directory
-        self.message = message or f"Invalid file system for {identifier}, Local directory {directory} missing."
 
 
 class CommitNotFoundError(Error):
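The three new subclasses let callers (notably the reworked clone/fetch paths in infrahub/git/base.py, +80/-29 in this release) distinguish connection, credential, and branch failures while remaining catchable as RepositoryError. A hypothetical handler sketch, using only the class names defined above:

from infrahub.exceptions import (
    RepositoryConnectionError,
    RepositoryCredentialsError,
    RepositoryError,
    RepositoryInvalidBranchError,
)

try:
    ...  # clone or fetch a repository
except RepositoryCredentialsError:
    ...  # authentication failed: prompt for new credentials
except RepositoryInvalidBranchError:
    ...  # requested branch missing on the remote
except RepositoryConnectionError:
    ...  # remote unreachable: check the address, retry later
except RepositoryError:
    ...  # any other repository failure is handled as before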