infrahub-server 1.7.0rc0__py3-none-any.whl → 1.7.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/gather.py +2 -2
- infrahub/api/query.py +3 -2
- infrahub/api/schema.py +5 -0
- infrahub/api/transformation.py +3 -3
- infrahub/cli/db.py +6 -2
- infrahub/computed_attribute/gather.py +2 -0
- infrahub/config.py +2 -2
- infrahub/core/attribute.py +21 -2
- infrahub/core/branch/models.py +11 -117
- infrahub/core/branch/tasks.py +7 -3
- infrahub/core/diff/merger/merger.py +5 -1
- infrahub/core/diff/model/path.py +43 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/graph/index.py +2 -0
- infrahub/core/initialization.py +2 -1
- infrahub/core/ipam/resource_allocator.py +229 -0
- infrahub/core/migrations/graph/__init__.py +10 -0
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +3 -2
- infrahub/core/migrations/graph/m015_diff_format_update.py +3 -2
- infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +3 -2
- infrahub/core/migrations/graph/m017_add_core_profile.py +6 -4
- infrahub/core/migrations/graph/m018_uniqueness_nulls.py +3 -4
- infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -3
- infrahub/core/migrations/graph/m025_uniqueness_nulls.py +3 -4
- infrahub/core/migrations/graph/m026_0000_prefix_fix.py +4 -5
- infrahub/core/migrations/graph/m028_delete_diffs.py +3 -2
- infrahub/core/migrations/graph/m029_duplicates_cleanup.py +3 -2
- infrahub/core/migrations/graph/m031_check_number_attributes.py +4 -3
- infrahub/core/migrations/graph/m032_cleanup_orphaned_branch_relationships.py +3 -2
- infrahub/core/migrations/graph/m034_find_orphaned_schema_fields.py +3 -2
- infrahub/core/migrations/graph/m035_orphan_relationships.py +3 -3
- infrahub/core/migrations/graph/m036_drop_attr_value_index.py +3 -2
- infrahub/core/migrations/graph/m037_index_attr_vals.py +3 -2
- infrahub/core/migrations/graph/m038_redo_0000_prefix_fix.py +4 -5
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +3 -2
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +3 -2
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +5 -4
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +12 -5
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +15 -4
- infrahub/core/migrations/graph/m045_backfill_hfid_display_label_in_db_profile_template.py +10 -4
- infrahub/core/migrations/graph/m046_fill_agnostic_hfid_display_labels.py +6 -5
- infrahub/core/migrations/graph/m047_backfill_or_null_display_label.py +19 -5
- infrahub/core/migrations/graph/m048_undelete_rel_props.py +6 -4
- infrahub/core/migrations/graph/m049_remove_is_visible_relationship.py +3 -3
- infrahub/core/migrations/graph/m050_backfill_vertex_metadata.py +3 -3
- infrahub/core/migrations/graph/m051_subtract_branched_from_microsecond.py +39 -0
- infrahub/core/migrations/graph/m052_fix_global_branch_level.py +51 -0
- infrahub/core/migrations/graph/m053_fix_branch_level_zero.py +61 -0
- infrahub/core/migrations/graph/m054_cleanup_orphaned_nodes.py +87 -0
- infrahub/core/migrations/graph/m055_remove_webhook_validate_certificates_default.py +86 -0
- infrahub/core/migrations/runner.py +6 -3
- infrahub/core/migrations/schema/attribute_kind_update.py +8 -11
- infrahub/core/migrations/schema/attribute_supports_profile.py +3 -8
- infrahub/core/migrations/schema/models.py +8 -0
- infrahub/core/migrations/schema/node_attribute_add.py +24 -29
- infrahub/core/migrations/schema/tasks.py +7 -1
- infrahub/core/migrations/shared.py +37 -30
- infrahub/core/node/__init__.py +2 -1
- infrahub/core/node/lock_utils.py +23 -2
- infrahub/core/node/resource_manager/ip_address_pool.py +5 -11
- infrahub/core/node/resource_manager/ip_prefix_pool.py +5 -21
- infrahub/core/node/resource_manager/number_pool.py +109 -39
- infrahub/core/query/__init__.py +7 -1
- infrahub/core/query/branch.py +18 -2
- infrahub/core/query/ipam.py +629 -40
- infrahub/core/query/node.py +128 -0
- infrahub/core/query/resource_manager.py +114 -1
- infrahub/core/relationship/model.py +9 -3
- infrahub/core/schema/attribute_parameters.py +28 -1
- infrahub/core/schema/attribute_schema.py +9 -2
- infrahub/core/schema/definitions/core/webhook.py +0 -1
- infrahub/core/schema/definitions/internal.py +7 -4
- infrahub/core/schema/manager.py +50 -38
- infrahub/core/validators/attribute/kind.py +5 -2
- infrahub/core/validators/determiner.py +4 -0
- infrahub/graphql/analyzer.py +3 -1
- infrahub/graphql/app.py +7 -10
- infrahub/graphql/execution.py +95 -0
- infrahub/graphql/manager.py +8 -2
- infrahub/graphql/mutations/proposed_change.py +15 -0
- infrahub/graphql/parser.py +10 -7
- infrahub/graphql/queries/ipam.py +20 -25
- infrahub/graphql/queries/search.py +29 -9
- infrahub/lock.py +7 -0
- infrahub/proposed_change/tasks.py +2 -0
- infrahub/services/adapters/cache/redis.py +7 -0
- infrahub/services/adapters/http/httpx.py +27 -0
- infrahub/trigger/catalogue.py +2 -0
- infrahub/trigger/models.py +73 -4
- infrahub/trigger/setup.py +1 -1
- infrahub/trigger/system.py +36 -0
- infrahub/webhook/models.py +4 -2
- infrahub/webhook/tasks.py +2 -2
- infrahub/workflows/initialization.py +2 -2
- infrahub_sdk/analyzer.py +2 -2
- infrahub_sdk/branch.py +12 -39
- infrahub_sdk/checks.py +4 -4
- infrahub_sdk/client.py +36 -0
- infrahub_sdk/ctl/cli_commands.py +2 -1
- infrahub_sdk/ctl/graphql.py +15 -4
- infrahub_sdk/ctl/utils.py +2 -2
- infrahub_sdk/enums.py +6 -0
- infrahub_sdk/graphql/renderers.py +21 -0
- infrahub_sdk/graphql/utils.py +85 -0
- infrahub_sdk/node/attribute.py +12 -2
- infrahub_sdk/node/constants.py +11 -0
- infrahub_sdk/node/metadata.py +69 -0
- infrahub_sdk/node/node.py +65 -14
- infrahub_sdk/node/property.py +3 -0
- infrahub_sdk/node/related_node.py +24 -1
- infrahub_sdk/node/relationship.py +10 -1
- infrahub_sdk/operation.py +2 -2
- infrahub_sdk/schema/repository.py +1 -2
- infrahub_sdk/transforms.py +2 -2
- infrahub_sdk/types.py +18 -2
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.2.dist-info}/METADATA +8 -8
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.2.dist-info}/RECORD +123 -114
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.2.dist-info}/entry_points.txt +0 -1
- infrahub_testcontainers/docker-compose-cluster.test.yml +16 -10
- infrahub_testcontainers/docker-compose.test.yml +11 -10
- infrahub_testcontainers/performance_test.py +1 -1
- infrahub/pools/address.py +0 -16
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.7.0rc0.dist-info → infrahub_server-1.7.2.dist-info}/licenses/LICENSE.txt +0 -0
infrahub/actions/gather.py
CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations

-from graphql import graphql
 from infrahub_sdk.graphql import Query
 from prefect import task
 from prefect.cache_policies import NONE
@@ -8,6 +7,7 @@ from prefect.cache_policies import NONE
 from infrahub.core.constants import InfrahubKind
 from infrahub.core.registry import registry
 from infrahub.database import InfrahubDatabase  # noqa: TC001 needed for prefect flow
+from infrahub.graphql.execution import execute_graphql_query
 from infrahub.graphql.initialization import prepare_graphql_params

 from .models import ActionTriggerRuleTriggerDefinition
@@ -95,7 +95,7 @@ async def gather_trigger_action_rules(db: InfrahubDatabase) -> list[ActionTrigge
         db=db,
         branch=registry.default_branch,
     )
-    response = await graphql(
+    response = await execute_graphql_query(
         schema=gql_params.schema,
         source=trigger_query.render(),
         context_value=gql_params.context,
infrahub/api/query.py
CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any

 from fastapi import APIRouter, Body, Depends, Path, Query, Request
-from graphql import graphql
 from pydantic import BaseModel, Field

 from infrahub.api.dependencies import BranchParams, get_branch_params, get_current_user, get_db
@@ -14,6 +13,7 @@ from infrahub.core.protocols import CoreGraphQLQuery
 from infrahub.database import InfrahubDatabase  # noqa: TC001
 from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer
 from infrahub.graphql.api.dependencies import build_graphql_query_permission_checker
+from infrahub.graphql.execution import cached_parse, execute_graphql_query
 from infrahub.graphql.initialization import prepare_graphql_params
 from infrahub.graphql.metrics import (
     GRAPHQL_DURATION_METRICS,
@@ -75,6 +75,7 @@ async def execute_query(
         schema=gql_params.schema,
         schema_branch=schema_branch,
         branch=branch_params.branch,
+        document=cached_parse(gql_query.query.value),
     )
     await permission_checker.check(
         db=db,
@@ -93,7 +94,7 @@ async def execute_query(
     }

     with GRAPHQL_DURATION_METRICS.labels(**labels).time():
-        result = await graphql(
+        result = await execute_graphql_query(
             schema=gql_params.schema,
             source=gql_query.query.value,
             context_value=gql_params.context,
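Both endpoints above now route execution through a shared execute_graphql_query helper and hand the analyzer a pre-parsed document, so a stored query is parsed once instead of on every request. The helper lives in infrahub/graphql/execution.py, which this diff does not expand; below is a minimal sketch of what a parse cache like cached_parse could look like, assuming it is simply an LRU cache over graphql.parse (the cache size is an assumption):

from functools import lru_cache

from graphql import DocumentNode, parse


@lru_cache(maxsize=512)  # assumed size; the real helper may differ
def cached_parse(source: str) -> DocumentNode:
    """Parse a GraphQL query string once and reuse the DocumentNode afterwards."""
    return parse(source)

Because stored GraphQLQuery nodes are executed repeatedly with identical source text, keying the cache on the raw string avoids re-running the parser on every API call.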
infrahub/api/schema.py
CHANGED
@@ -36,6 +36,7 @@ from infrahub.core.schema import (
     TemplateSchema,
 )
 from infrahub.core.schema.constants import SchemaNamespace  # noqa: TC001
+from infrahub.core.timestamp import Timestamp
 from infrahub.core.validators.models.validate_migration import (
     SchemaValidateMigrationData,
     SchemaValidatorPathResponseData,
@@ -353,6 +354,8 @@ async def load_schema(
     if error_messages:
         raise SchemaNotValidError(",\n".join(error_messages))

+    schema_load_at = Timestamp()
+
     # ----------------------------------------------------------
     # Update the schema
     # ----------------------------------------------------------
@@ -367,6 +370,7 @@ async def load_schema(
             limit=result.diff.all,
             update_db=True,
             user_id=account_session.account_id,
+            at=schema_load_at,
         )
         branch.update_schema_hash()
         log.info("Schema has been updated", branch=branch.name, hash=branch.active_schema_hash.main)
@@ -389,6 +393,7 @@ async def load_schema(
             previous_schema=origin_schema,
             migrations=result.migrations,
             user_id=account_session.account_id,
+            at=schema_load_at,
         )
         migration_error_msgs = await service.workflow.execute_workflow(
             workflow=SCHEMA_APPLY_MIGRATION,
infrahub/api/transformation.py
CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations
 from typing import TYPE_CHECKING

 from fastapi import APIRouter, Depends, Path, Request
-from graphql import graphql
 from starlette.responses import JSONResponse, PlainTextResponse

 from infrahub.api.dependencies import (
@@ -22,6 +21,7 @@ from infrahub.core.protocols import (
 )
 from infrahub.database import InfrahubDatabase  # noqa: TC001
 from infrahub.exceptions import TransformError
+from infrahub.graphql.execution import execute_graphql_query
 from infrahub.graphql.initialization import prepare_graphql_params
 from infrahub.graphql.utils import extract_data
 from infrahub.transformations.models import TransformJinjaTemplateData, TransformPythonData
@@ -69,7 +69,7 @@ async def transform_python(
         db=dbs, branch=branch_params.branch, at=branch_params.at, service=request.app.state.service
     )

-    result = await graphql(
+    result = await execute_graphql_query(
         schema=gql_params.schema,
         source=query.query.value,
         context_value=gql_params.context,
@@ -134,7 +134,7 @@ async def transform_jinja2(
         db=dbs, branch=branch_params.branch, at=branch_params.at, service=request.app.state.service
     )

-    result = await graphql(
+    result = await execute_graphql_query(
         schema=gql_params.schema,
         source=query.query.value,
         context_value=gql_params.context,
infrahub/cli/db.py
CHANGED
@@ -40,10 +40,11 @@ from infrahub.core.migrations.exceptions import MigrationFailureError
 from infrahub.core.migrations.graph import get_graph_migrations, get_migration_by_number
 from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
 from infrahub.core.migrations.schema.tasks import schema_apply_migrations
-from infrahub.core.migrations.shared import get_migration_console
+from infrahub.core.migrations.shared import MigrationInput, get_migration_console
 from infrahub.core.schema import SchemaRoot, core_models, internal_schema
 from infrahub.core.schema.definitions.deprecated import deprecated_models
 from infrahub.core.schema.manager import SchemaManager
+from infrahub.core.timestamp import Timestamp
 from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
 from infrahub.core.validators.tasks import schema_validate_migrations
 from infrahub.database import DatabaseType
@@ -357,7 +358,7 @@ async def migrate_database(
     root_node = await get_root_node(db=db)

     for migration in migrations:
-        execution_result = await migration.execute(db=db)
+        execution_result = await migration.execute(migration_input=MigrationInput(db=db))
         validation_result = None

         if execution_result.success:
@@ -502,6 +503,7 @@ async def update_core_schema(db: InfrahubDatabase, initialize: bool = True, debu
     schema_default_branch.process()
     registry.schema.set_schema_branch(name=default_branch.name, schema=schema_default_branch)

+    update_at = Timestamp()
     async with db.start_transaction() as dbt:
         await registry.schema.update_schema_branch(
             schema=candidate_schema,
@@ -510,6 +512,7 @@ async def update_core_schema(db: InfrahubDatabase, initialize: bool = True, debu
             diff=result.diff,
             limit=result.diff.all,
             update_db=True,
+            at=update_at,
         )
         default_branch.update_schema_hash()
         get_migration_console().log(
@@ -527,6 +530,7 @@ async def update_core_schema(db: InfrahubDatabase, initialize: bool = True, debu
             new_schema=candidate_schema,
             previous_schema=origin_schema,
             migrations=result.migrations,
+            at=update_at,
         )
         migration_error_msgs = await schema_apply_migrations(message=apply_migration_data)

infrahub/computed_attribute/gather.py
CHANGED
@@ -14,6 +14,7 @@ from infrahub.core.registry import registry
 from infrahub.database import InfrahubDatabase  # noqa: TC001 needed for prefect flow
 from infrahub.git.utils import get_repositories_commit_per_branch
 from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer
+from infrahub.graphql.execution import cached_parse
 from infrahub.graphql.initialization import prepare_graphql_params

 from .models import (
@@ -74,6 +75,7 @@ async def gather_python_transform_attributes(
             branch=branch,
             schema_branch=schema_branch,
             schema=graphql_params.schema,
+            document=cached_parse(query.query.value),
         )
         for attribute in transform_attributes[transform.name.value]:
             python_transform_computed_attribute = PythonTransformComputedAttribute(
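migrate_database now wraps its arguments in a MigrationInput before calling migration.execute. The container itself is defined in infrahub/core/migrations/shared.py (+37 -30 above), which this diff does not expand; a hypothetical sketch follows. Only the db field is confirmed by this diff; the optional at field is an assumption based on migration_runner.run(db=db, at=Timestamp()) in branch/tasks.py below:

from dataclasses import dataclass

from infrahub.core.timestamp import Timestamp
from infrahub.database import InfrahubDatabase


@dataclass
class MigrationInput:
    # confirmed by migration.execute(migration_input=MigrationInput(db=db)) above
    db: InfrahubDatabase
    # assumed: lets the runner pin all migration writes to a single timestamp
    at: Timestamp | None = None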
infrahub/config.py
CHANGED
@@ -532,8 +532,8 @@ class HTTPSettings(BaseSettings):

         return self

-    def get_tls_context(self) -> ssl.SSLContext:
-        if self.tls_insecure:
+    def get_tls_context(self, force_verify: bool = False) -> ssl.SSLContext:
+        if self.tls_insecure and not force_verify:
             return ssl._create_unverified_context()

         if not self.tls_ca_bundle:
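The new force_verify flag lets a caller insist on certificate verification even when the deployment sets tls_insecure globally; the httpx adapter and webhook changes listed above are plausible consumers. A standalone sketch of the same pattern, with the body after the insecure check assumed from typical ssl usage since the diff truncates there:

import ssl


def get_tls_context(tls_insecure: bool, ca_bundle: str | None, force_verify: bool = False) -> ssl.SSLContext:
    """Standalone mirror of HTTPSettings.get_tls_context after this change (sketch)."""
    if tls_insecure and not force_verify:
        # global "trust everything" escape hatch, unless the caller forces verification
        return ssl._create_unverified_context()
    context = ssl.create_default_context()  # assumed; the diff does not show this branch
    if ca_bundle:
        context.load_verify_locations(cafile=ca_bundle)
    return context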
infrahub/core/attribute.py
CHANGED
@@ -366,6 +366,24 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin, MetadataInterface):

         return await self._update(db=db, user_id=user_id, at=save_at)

+    def get_branch_for_delete(self) -> Branch:
+        """Get the appropriate branch for explicit attribute delete operations.
+
+        For branch-agnostic attributes on branch-aware nodes, use the current branch
+        to create branch-scoped deletion edges rather than global deletion.
+
+        Returns:
+            Branch: The branch to use for the delete operation
+        """
+        if (
+            self.schema.branch == BranchSupportType.AGNOSTIC
+            and self.node is not None
+            and self.node._schema.branch == BranchSupportType.AWARE
+        ):
+            return self.branch
+
+        return self.get_branch_based_on_support_type()
+
     async def delete(
         self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID, at: Timestamp | None = None
     ) -> AttributeChangelog | None:
@@ -373,7 +391,7 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin, MetadataInterface):
             return None

         delete_at = Timestamp(at)
-        branch = self.get_branch_based_on_support_type()
+        branch = self.get_branch_for_delete()

         query = await AttributeDeleteQuery.init(db=db, branch=branch, attr=self, user_id=user_id, at=delete_at)
         await query.execute(db=db)
@@ -642,9 +660,10 @@ class BaseAttribute(FlagPropertyMixin, NodePropertyMixin, MetadataInterface):
         hierarchy_level = branch.hierarchy_level
         if self.schema.branch == BranchSupportType.AGNOSTIC:
             branch = registry.get_global_branch()
+            hierarchy_level = 1
         elif self.schema.branch == BranchSupportType.LOCAL and self.node._schema.branch == BranchSupportType.AGNOSTIC:
             branch = registry.get_global_branch()
-            hierarchy_level =
+            hierarchy_level = 1
         data = AttributeCreateData(
             uuid=str(UUIDT()),
             name=self.name,
infrahub/core/branch/models.py
CHANGED
@@ -333,7 +333,7 @@
                 f"({rel}.branch IN $branch{idx} AND {rel}.from <= $time{idx} AND {rel}.to IS NULL)"
             )
             filters_per_rel.append(
-                f"({rel}.branch IN $branch{idx} AND {rel}.from <= $time{idx} AND {rel}.to
+                f"({rel}.branch IN $branch{idx} AND {rel}.from <= $time{idx} AND {rel}.to > $time{idx})"
             )

         if not include_outside_parentheses:
@@ -366,7 +366,9 @@
         at = Timestamp(at)
         at_str = at.to_string()
         if branch_agnostic:
-            filter_str =
+            filter_str = (
+                f"{variable_name}.from < ${pp}time1 AND ({variable_name}.to IS NULL or {variable_name}.to > ${pp}time1)"
+            )
             params[f"{pp}time1"] = at_str
             return filter_str, params

@@ -380,132 +382,24 @@
         for idx in range(len(branches_times)):
             filters.append(
                 f"({variable_name}.branch IN ${pp}branch{idx} "
-                f"AND {variable_name}.from
+                f"AND {variable_name}.from <= ${pp}time{idx} AND {variable_name}.to IS NULL)"
             )
             filters.append(
                 f"({variable_name}.branch IN ${pp}branch{idx} "
-                f"AND {variable_name}.from
-                f"AND {variable_name}.to
+                f"AND {variable_name}.from <= ${pp}time{idx} "
+                f"AND {variable_name}.to > ${pp}time{idx})"
             )

         filter_str = "(" + "\n OR ".join(filters) + ")"

         return filter_str, params

-    def
-        self,
-
-        start_time: Timestamp,
-        end_time: Timestamp,
-        include_outside_parentheses: bool = False,
-        include_global: bool = False,
-    ) -> tuple[list, dict]:
-        """Generate a CYPHER Query filter based on a list of relationships to query a range of values in the graph.
-        The goal is to return all the values that are valid during this timerange.
-        """
-
-        filters = []
-        params = {}
-
-        if not isinstance(rel_labels, list):
-            raise TypeError(f"rel_labels must be a list, not a {type(rel_labels)}")
-
-        start_time = Timestamp(start_time)
-        end_time = Timestamp(end_time)
-
-        if include_global:
-            branches_times = self.get_branches_and_times_to_query_global(at=start_time)
-        else:
-            branches_times = self.get_branches_and_times_to_query(at=start_time)
-
-        params["branches"] = list({branch for branches in branches_times for branch in branches})
-        params["start_time"] = start_time.to_string()
-        params["end_time"] = end_time.to_string()
-
-        for rel in rel_labels:
-            filters_per_rel = [
-                f"({rel}.branch IN $branches AND {rel}.from <= $end_time AND {rel}.to IS NULL)",
-                f"({rel}.branch IN $branches AND ({rel}.from <= $end_time OR ({rel}.to >= $start_time AND {rel}.to <= $end_time)))",
-            ]
-
-            if not include_outside_parentheses:
-                filters.append("\n OR ".join(filters_per_rel))
-
-            filters.append("(" + "\n OR ".join(filters_per_rel) + ")")
-
-        return filters, params
-
-    def get_query_filter_relationships_diff(
-        self, rel_labels: list, diff_from: Timestamp, diff_to: Timestamp
-    ) -> tuple[list, dict]:
-        """
-        Generate a CYPHER Query filter to query all events that are applicable to a given branch based
-        - The time when the branch as created
-        - The branched_from time of the branch
-        - The diff_to and diff_from time as provided
-        """
-
-        if not isinstance(rel_labels, list):
-            raise TypeError(f"rel_labels must be a list, not a {type(rel_labels)}")
-
-        start_times, end_times = self.get_branches_and_times_for_range(start_time=diff_from, end_time=diff_to)
-
-        filters = []
-        params = {}
-
-        for idx, branch_name in enumerate(start_times.keys()):
-            params[f"branch{idx}"] = branch_name
-            params[f"start_time{idx}"] = start_times[branch_name]
-            params[f"end_time{idx}"] = end_times[branch_name]
-
-        for rel in rel_labels:
-            filters_per_rel = []
-            for idx in range(len(start_times)):
-                filters_per_rel.extend(
-                    [
-                        f"""({rel}.branch = $branch{idx}
-                        AND {rel}.from >= $start_time{idx}
-                        AND {rel}.from <= $end_time{idx}
-                        AND ( r2.to is NULL or r2.to >= $end_time{idx}))""",
-                        f"""({rel}.branch = $branch{idx} AND {rel}.from >= $start_time{idx}
-                        AND {rel}.to <= $start_time{idx})""",
-                    ]
-                )
-
-            filters.append("(" + "\n OR ".join(filters_per_rel) + ")")
-
-        return filters, params
-
-    def get_query_filter_range(self, rel_label: list, start_time: Timestamp, end_time: Timestamp) -> tuple[list, dict]:
-        """
-        Generate a CYPHER Query filter to query a range of values in the graph between start_time and end_time."""
-
-        filters = []
-        params = {}
-
-        start_time = Timestamp(start_time)
-        end_time = Timestamp(end_time)
-
-        params["branches"] = self.get_branches_in_scope()
-        params["start_time"] = start_time.to_string()
-        params["end_time"] = end_time.to_string()
-
-        filters_per_rel = [
-            f"""({rel_label}.branch IN $branches AND {rel_label}.from >= $start_time
-            AND {rel_label}.from <= $end_time AND {rel_label}.to IS NULL)""",
-            f"""({rel_label}.branch IN $branches AND (({rel_label}.from >= $start_time
-            AND {rel_label}.from <= $end_time) OR ({rel_label}.to >= $start_time
-            AND {rel_label}.to <= $end_time)))""",
-        ]
-
-        filters.append("(" + "\n OR ".join(filters_per_rel) + ")")
-
-        return filters, params
-
-    async def rebase(self, db: InfrahubDatabase, user_id: str = SYSTEM_USER_ID) -> None:
+    async def rebase(
+        self, db: InfrahubDatabase, at: Optional[Union[str, Timestamp]] = None, user_id: str = SYSTEM_USER_ID
+    ) -> None:
         """Rebase the current Branch with its origin branch"""

-        at = Timestamp()
+        at = Timestamp(at)

         await self.rebase_graph(db=db, at=at)

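The operator changes above ({rel}.to > $time{idx} where the closing bound previously also matched at the query time) make an edge's lifetime a half-open interval [from, to): an edge closed exactly at the query timestamp no longer also counts as active at that instant, which is the double-counting that the merge change in merger.py below guards against. A plain-datetime illustration of the new predicate (not Infrahub code):

from datetime import datetime


def edge_active_at(frm: datetime, to: datetime | None, at: datetime) -> bool:
    # old shape: to IS NULL OR to >= at -> an edge with to == at matched as both closed and active
    # new shape: to IS NULL OR to > at  -> edges closed exactly at `at` are excluded
    return frm <= at and (to is None or to > at)


closed_at = datetime(2025, 1, 1, 12, 0, 0)
assert edge_active_at(datetime(2025, 1, 1), closed_at, at=closed_at) is False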
infrahub/core/branch/tasks.py
CHANGED
@@ -88,7 +88,7 @@ async def migrate_branch(branch: str, context: InfrahubContext, send_events: boo

     try:
         log.info(f"Running migrations for branch '{obj.name}'")
-        await migration_runner.run(db=db)
+        await migration_runner.run(db=db, at=Timestamp())
     except MigrationFailureError as exc:
         log.error(f"Failed to run migrations for branch '{obj.name}': {exc.errors}")
         raise
@@ -170,7 +170,8 @@ async def rebase_branch(branch: str, context: InfrahubContext, send_events: bool
     migrations = []
     async with lock.registry.global_graph_lock():
         async with db.start_transaction() as dbt:
-            await obj.rebase(db=dbt, user_id=context.account.account_id)
+            rebase_at = Timestamp()
+            await obj.rebase(db=dbt, user_id=context.account.account_id, at=rebase_at)
         log.info("Branch successfully rebased")

     if obj.has_schema_changes:
@@ -199,6 +200,7 @@
                 previous_schema=schema_in_main_before,
                 migrations=migrations,
                 user_id=context.account.account_id,
+                at=rebase_at,
             )
         )
         for error in errors:
@@ -291,7 +293,8 @@ async def merge_branch(branch: str, context: InfrahubContext, proposed_change_id
         diff_locker=DiffLocker(),
         workflow=get_workflow(),
     )
-    branch_diff = await merger.merge()
+    merge_at = Timestamp()
+    branch_diff = await merger.merge(at=merge_at)
     await merger.update_schema()

     changelog_collector = DiffChangelogCollector(diff=branch_diff, branch=obj, db=db)
@@ -304,6 +307,7 @@
             previous_schema=merger.initial_source_schema,
             migrations=merger.migrations,
             user_id=context.account.account_id,
+            at=merge_at,
         )
     )
     for error in errors:

infrahub/core/diff/merger/merger.py
CHANGED
@@ -125,7 +125,11 @@ class DiffMerger:
         )
         await metadata_query.execute(db=self.db)

-        self.source_branch.branched_from = at.to_string()
+        # set the branched_from time to the previous microsecond to prevent duplicated
+        # relationships on the branch after the merge
+        branched_from = at.subtract(microseconds=1)
+
+        self.source_branch.branched_from = branched_from.to_string()
         await self.source_branch.save(db=self.db)
         registry.branch[self.source_branch.name] = self.source_branch
         return enriched_diff
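Moving branched_from one microsecond before the merge timestamp means every edge the merge writes at `at` falls strictly after the branch point, so a later diff or rebase on the same branch does not re-report them; migration m051_subtract_branched_from_microsecond applies the same correction to existing branches. Sketched with plain datetimes, since Timestamp.subtract is only referenced, not shown, in this diff:

from datetime import datetime, timedelta, timezone

merge_at = datetime(2025, 1, 1, 12, 0, 0, 500, tzinfo=timezone.utc)
branched_from = merge_at - timedelta(microseconds=1)  # what at.subtract(microseconds=1) amounts to
assert branched_from.microsecond == 499
# Edges written by the merge at merge_at now sit strictly after branched_from,
# so the half-open validity filters above do not pick them up twice.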
infrahub/core/diff/model/path.py
CHANGED
@@ -1,5 +1,6 @@
 from __future__ import annotations

+from copy import deepcopy
 from dataclasses import asdict, dataclass, field
 from enum import Enum
 from typing import TYPE_CHECKING, Any
@@ -269,6 +270,27 @@ class EnrichedDiffRelationship(BaseSummary):
     def __hash__(self) -> int:
         return hash(self.name)

+    def __deepcopy__(self, memo: dict[int, Any]) -> EnrichedDiffRelationship:
+        """Custom deepcopy to handle circular references with EnrichedDiffNode.
+
+        The default deepcopy can fail because it may call __hash__ on a partially
+        constructed instance (before 'name' is set) when handling circular references
+        through the nodes -> relationships cycle.
+
+        This implementation ensures 'name' is set and the instance is registered
+        in memo before deepcopying other attributes that may have circular references.
+        """
+        new_obj = object.__new__(EnrichedDiffRelationship)
+        # Set the hashable attribute first (required for __hash__)
+        new_obj.name = self.name
+        # Register in memo BEFORE copying other attributes to handle circular refs
+        memo[id(self)] = new_obj
+        # Deepcopy all other attributes
+        for key, value in self.__dict__.items():
+            if key != "name":
+                setattr(new_obj, key, deepcopy(value, memo))
+        return new_obj
+
     @property
     def num_properties(self) -> int:
         return sum(r.num_properties for r in self.relationships)
@@ -327,6 +349,27 @@ class EnrichedDiffNode(BaseSummary):
     def __hash__(self) -> int:
         return hash(self.identifier)

+    def __deepcopy__(self, memo: dict[int, Any]) -> EnrichedDiffNode:
+        """Custom deepcopy to handle circular references with EnrichedDiffRelationship.
+
+        The default deepcopy can fail because it may call __hash__ on a partially
+        constructed instance (before 'identifier' is set) when handling circular references
+        through the relationships -> nodes cycle.
+
+        This implementation ensures 'identifier' is set and the instance is registered
+        in memo before deepcopying other attributes that may have circular references.
+        """
+        new_obj = object.__new__(EnrichedDiffNode)
+        # Set the hashable attribute first (required for __hash__)
+        new_obj.identifier = deepcopy(self.identifier, memo)
+        # Register in memo BEFORE copying other attributes to handle circular refs
+        memo[id(self)] = new_obj
+        # Deepcopy all other attributes
+        for key, value in self.__dict__.items():
+            if key != "identifier":
+                setattr(new_obj, key, deepcopy(value, memo))
+        return new_obj
+
     @property
     def uuid(self) -> str:
         return self.identifier.uuid
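Both __deepcopy__ implementations follow the standard memo-registration recipe for cyclic object graphs: make the copy hashable first, register it in memo, then recurse. A minimal self-contained demonstration of the pattern with a hypothetical Node class (not Infrahub's):

from copy import deepcopy
from typing import Any


class Node:
    def __init__(self, name: str) -> None:
        self.name = name
        self.peers: list["Node"] = []  # may cycle back to self

    def __hash__(self) -> int:
        return hash(self.name)  # fails if called before `name` is set

    def __deepcopy__(self, memo: dict[int, Any]) -> "Node":
        new_obj = object.__new__(Node)
        new_obj.name = self.name   # set the hashable attribute first
        memo[id(self)] = new_obj   # register BEFORE recursing into cycles
        new_obj.peers = deepcopy(self.peers, memo)
        return new_obj


a = Node("a")
a.peers.append(a)        # circular reference
b = deepcopy(a)
assert b.peers[0] is b   # the cycle resolves through memo, no recursion error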
infrahub/core/graph/__init__.py
CHANGED
@@ -1 +1 @@
-GRAPH_VERSION = 50
+GRAPH_VERSION = 55
infrahub/core/graph/index.py
CHANGED
@@ -13,6 +13,8 @@ node_indexes: list[IndexItem] = [
     IndexItem(name="rel_uuid", label="Relationship", properties=["uuid"], type=IndexType.RANGE),
     IndexItem(name="rel_identifier", label="Relationship", properties=["name"], type=IndexType.RANGE),
     IndexItem(name="attr_value_indexed", label="AttributeValueIndexed", properties=["value"], type=IndexType.RANGE),
+    # TEXT index for efficient CONTAINS/STARTS WITH/ENDS WITH searches on attribute values
+    IndexItem(name="attr_value_indexed_text", label="AttributeValueIndexed", properties=["value"], type=IndexType.TEXT),
     # diff indices
     IndexItem(name="diff_uuid", label="DiffRoot", properties=["uuid"], type=IndexType.TEXT),
     IndexItem(name="diff_node_uuid", label="DiffNode", properties=["uuid"], type=IndexType.TEXT),
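In Neo4j, a RANGE index serves equality and ordering lookups, while CONTAINS, STARTS WITH and ENDS WITH predicates are served by a TEXT index, which is what the new entry adds for attribute values (the changes to infrahub/graphql/queries/search.py above are the likely consumer). For context, a sketch of the equivalent raw Cypher via the official driver; Infrahub creates its indexes through IndexItem, and the connection details here are placeholders:

from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
with driver.session() as session:
    # what an IndexItem of type TEXT on AttributeValueIndexed.value maps to
    session.run(
        "CREATE TEXT INDEX attr_value_indexed_text IF NOT EXISTS "
        "FOR (n:AttributeValueIndexed) ON (n.value)"
    )
    # the kind of predicate a TEXT index accelerates
    session.run("MATCH (n:AttributeValueIndexed) WHERE n.value CONTAINS $q RETURN n", q="eth0")
driver.close()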
infrahub/core/initialization.py
CHANGED
@@ -30,6 +30,7 @@ from infrahub.core.protocols import CoreAccount, CoreAccountGroup, CoreAccountRo
 from infrahub.core.root import Root
 from infrahub.core.schema import SchemaRoot, core_models, internal_schema
 from infrahub.core.schema.manager import SchemaManager
+from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase
 from infrahub.database.memgraph import IndexManagerMemgraph
 from infrahub.database.neo4j import IndexManagerNeo4j
@@ -527,7 +528,7 @@ async def first_time_initialization(db: InfrahubDatabase) -> None:
     schema_branch = registry.schema.register_schema(schema=schema, branch=default_branch.name)
     schema_branch.load_schema(schema=SchemaRoot(**core_models))
     schema_branch.process()
-    await registry.schema.load_schema_to_db(schema=schema_branch, branch=default_branch, db=db)
+    await registry.schema.load_schema_to_db(schema=schema_branch, branch=default_branch, db=db, at=Timestamp())
     registry.schema.set_schema_branch(name=default_branch.name, schema=schema_branch)
     default_branch.update_schema_hash()
     await default_branch.save(db=db)