infrahub-server 1.2.7__py3-none-any.whl → 1.2.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/api/transformation.py +1 -0
- infrahub/artifacts/models.py +4 -0
- infrahub/cli/db.py +15 -6
- infrahub/computed_attribute/tasks.py +34 -12
- infrahub/config.py +2 -1
- infrahub/constants/__init__.py +0 -0
- infrahub/core/branch/tasks.py +0 -2
- infrahub/core/constants/__init__.py +1 -0
- infrahub/core/diff/calculator.py +4 -3
- infrahub/core/diff/combiner.py +1 -2
- infrahub/core/diff/coordinator.py +44 -28
- infrahub/core/diff/data_check_synchronizer.py +3 -2
- infrahub/core/diff/enricher/hierarchy.py +38 -27
- infrahub/core/diff/ipam_diff_parser.py +5 -4
- infrahub/core/diff/merger/merger.py +20 -18
- infrahub/core/diff/model/field_specifiers_map.py +64 -0
- infrahub/core/diff/model/path.py +55 -58
- infrahub/core/diff/parent_node_adder.py +14 -16
- infrahub/core/diff/query/drop_nodes.py +42 -0
- infrahub/core/diff/query/field_specifiers.py +8 -7
- infrahub/core/diff/query/filters.py +15 -1
- infrahub/core/diff/query/save.py +3 -0
- infrahub/core/diff/query_parser.py +49 -52
- infrahub/core/diff/repository/deserializer.py +36 -23
- infrahub/core/diff/repository/repository.py +31 -12
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/graph/index.py +3 -1
- infrahub/core/initialization.py +23 -7
- infrahub/core/manager.py +16 -5
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m014_remove_index_attr_value.py +9 -8
- infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
- infrahub/core/protocols.py +1 -0
- infrahub/core/query/branch.py +27 -17
- infrahub/core/query/diff.py +65 -38
- infrahub/core/query/node.py +111 -33
- infrahub/core/query/relationship.py +17 -3
- infrahub/core/query/subquery.py +2 -2
- infrahub/core/schema/definitions/core/builtin.py +2 -4
- infrahub/core/schema/definitions/core/transform.py +1 -0
- infrahub/core/schema/schema_branch.py +3 -0
- infrahub/core/validators/aggregated_checker.py +2 -2
- infrahub/core/validators/uniqueness/query.py +30 -9
- infrahub/database/__init__.py +1 -16
- infrahub/database/index.py +1 -1
- infrahub/database/memgraph.py +1 -12
- infrahub/database/neo4j.py +1 -13
- infrahub/git/integrator.py +27 -3
- infrahub/git/models.py +4 -0
- infrahub/git/tasks.py +3 -0
- infrahub/git_credential/helper.py +2 -2
- infrahub/graphql/mutations/computed_attribute.py +5 -1
- infrahub/graphql/queries/diff/tree.py +2 -1
- infrahub/message_bus/operations/requests/proposed_change.py +6 -0
- infrahub/message_bus/types.py +3 -0
- infrahub/patch/queries/consolidate_duplicated_nodes.py +109 -0
- infrahub/patch/queries/delete_duplicated_edges.py +138 -0
- infrahub/proposed_change/tasks.py +1 -0
- infrahub/server.py +1 -3
- infrahub/transformations/models.py +3 -0
- infrahub/transformations/tasks.py +1 -0
- infrahub/trigger/models.py +11 -1
- infrahub/trigger/setup.py +38 -13
- infrahub/trigger/tasks.py +1 -4
- infrahub/webhook/models.py +3 -0
- infrahub/workflows/initialization.py +1 -3
- infrahub_sdk/client.py +4 -4
- infrahub_sdk/config.py +17 -0
- infrahub_sdk/ctl/cli_commands.py +7 -1
- infrahub_sdk/ctl/generator.py +2 -2
- infrahub_sdk/generator.py +12 -66
- infrahub_sdk/operation.py +80 -0
- infrahub_sdk/protocols.py +12 -0
- infrahub_sdk/recorder.py +3 -0
- infrahub_sdk/schema/repository.py +4 -0
- infrahub_sdk/transforms.py +15 -27
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/METADATA +2 -2
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/RECORD +84 -78
- infrahub_testcontainers/container.py +1 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -1
- infrahub/database/manager.py +0 -15
- /infrahub/{database/constants.py → constants/database.py} +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/WHEEL +0 -0
- {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/entry_points.txt +0 -0
infrahub/api/transformation.py
CHANGED

@@ -88,6 +88,7 @@ async def transform_python(
         branch=branch_params.branch.name,
         transform_location=f"{transform.file_path.value}::{transform.class_name.value}",
         timeout=transform.timeout.value,
+        convert_query_response=transform.convert_query_response.value or False,
         data=data,
     )

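The endpoint now forwards the new flag from the transform's schema node into the execution call. A minimal sketch of a Python transform opting in, assuming the matching 1.2.9 SDK exposes `convert_query_response` as a class attribute on `InfrahubTransform` (the attribute name mirrors the schema field added in this release; the class and query names are illustrative):

```python
from infrahub_sdk.transforms import InfrahubTransform


class TagsTransform(InfrahubTransform):
    query = "tags_query"  # illustrative GraphQL query name
    convert_query_response = True  # assumed SDK attribute mirroring the new schema field

    async def transform(self, data):
        # With conversion enabled, `data` would carry InfrahubNode objects
        # instead of the raw GraphQL dict payload.
        return {"tags": data}
```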
infrahub/artifacts/models.py
CHANGED

@@ -12,6 +12,10 @@ class CheckArtifactCreate(BaseModel):
     content_type: str = Field(..., description="Content type of the artifact")
     transform_type: str = Field(..., description="The type of transform associated with this artifact")
     transform_location: str = Field(..., description="The transforms location within the repository")
+    convert_query_response: bool = Field(
+        default=False,
+        description="Indicate if the query response should be converted to InfrahubNode objects for Python transforms",
+    )
     repository_id: str = Field(..., description="The unique ID of the Repository")
     repository_name: str = Field(..., description="The name of the Repository")
     repository_kind: str = Field(..., description="The kind of the Repository")

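Because the field ships with a default, messages produced by a 1.2.7 sender that omit it still validate on a 1.2.9 receiver. A self-contained sketch of the same pydantic pattern (the model name is an illustrative stand-in, not the full `CheckArtifactCreate`):

```python
from pydantic import BaseModel, Field


class ArtifactRequest(BaseModel):  # illustrative stand-in for CheckArtifactCreate
    transform_location: str = Field(..., description="The transforms location within the repository")
    convert_query_response: bool = Field(
        default=False,
        description="Indicate if the query response should be converted to InfrahubNode objects",
    )


# A payload that predates the field still deserializes cleanly:
req = ArtifactRequest(transform_location="transforms/tags.py::TagsTransform")
assert req.convert_query_response is False
```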
infrahub/cli/db.py
CHANGED

@@ -23,7 +23,7 @@ from infrahub import config
 from infrahub.core import registry
 from infrahub.core.graph import GRAPH_VERSION
 from infrahub.core.graph.constraints import ConstraintManagerBase, ConstraintManagerMemgraph, ConstraintManagerNeo4j
-from infrahub.core.graph.index import node_indexes, rel_indexes
+from infrahub.core.graph.index import attr_value_index, node_indexes, rel_indexes
 from infrahub.core.graph.schema import (
     GRAPH_SCHEMA,
     GraphAttributeProperties,
@@ -48,6 +48,8 @@ from infrahub.core.utils import delete_all_nodes
 from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
 from infrahub.core.validators.tasks import schema_validate_migrations
 from infrahub.database import DatabaseType
+from infrahub.database.memgraph import IndexManagerMemgraph
+from infrahub.database.neo4j import IndexManagerNeo4j
 from infrahub.log import get_logger
 from infrahub.services import InfrahubServices
 from infrahub.services.adapters.message_bus.local import BusSimulator
@@ -59,6 +61,7 @@ from .patch import patch_app
 if TYPE_CHECKING:
     from infrahub.cli.context import CliContext
     from infrahub.database import InfrahubDatabase
+    from infrahub.database.index import IndexManagerBase

 app = AsyncTyper()
 app.add_typer(patch_app, name="patch")
@@ -249,14 +252,20 @@ async def index(

     context: CliContext = ctx.obj
     dbdriver = await context.init_db(retry=1)
-    dbdriver.
+    if dbdriver.db_type is DatabaseType.MEMGRAPH:
+        index_manager: IndexManagerBase = IndexManagerMemgraph(db=dbdriver)
+    else:
+        index_manager = IndexManagerNeo4j(db=dbdriver)
+
+    if config.SETTINGS.experimental_features.value_db_index:
+        node_indexes.append(attr_value_index)
+    index_manager.init(nodes=node_indexes, rels=rel_indexes)

     if action == IndexAction.ADD:
-        await
+        await index_manager.add()
     elif action == IndexAction.DROP:
-        await
+        await index_manager.drop()

-    indexes = await
+    indexes = await index_manager.list()

     console = Console()

@@ -412,7 +421,7 @@ async def update_core_schema(
         update_db=True,
     )
     default_branch.update_schema_hash()
-    rprint("The Core Schema has been updated")
+    rprint("The Core Schema has been updated, make sure to rebase any open branches after the upgrade")
     if debug:
         rprint(f"New schema hash: {default_branch.active_schema_hash.main}")
     await default_branch.save(db=dbt)

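The `index` command now selects an index-manager implementation by database type instead of reaching through the removed `infrahub/database/manager.py`. A toy sketch of that dispatch, with stand-in classes rather than the real API:

```python
from enum import Enum


class DatabaseType(Enum):
    NEO4J = "neo4j"
    MEMGRAPH = "memgraph"


class IndexManagerBase:
    def init(self, nodes: list, rels: list) -> None:
        # Record the indexes this manager is responsible for.
        self.nodes, self.rels = nodes, rels


class IndexManagerNeo4j(IndexManagerBase): ...


class IndexManagerMemgraph(IndexManagerBase): ...


def select_index_manager(db_type: DatabaseType) -> IndexManagerBase:
    # Memgraph gets its own implementation; everything else uses Neo4j.
    if db_type is DatabaseType.MEMGRAPH:
        return IndexManagerMemgraph()
    return IndexManagerNeo4j()


assert isinstance(select_index_manager(DatabaseType.NEO4J), IndexManagerNeo4j)
```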
infrahub/computed_attribute/tasks.py
CHANGED

@@ -113,6 +113,7 @@ async def process_transform(
         location=f"{transform.file_path.value}::{transform.class_name.value}",
         data=data,
         client=service.client,
+        convert_query_response=transform.convert_query_response.value,
     )  # type: ignore[misc]

     await service.client.execute_graphql(
@@ -301,15 +302,24 @@ async def computed_attribute_setup_jinja2(

     triggers = await gather_trigger_computed_attribute_jinja2()

-
-
+    # Since we can have multiple trigger per NodeKind
+    # we need to extract the list of unique node that should be processed
+    # also
+    # Because the automation in Prefect doesn't capture all information about the computed attribute
+    # we can't tell right now if a given computed attribute has changed and need to be updated
+    unique_nodes: set[tuple[str, str, str]] = {
+        (trigger.branch, trigger.computed_attribute.kind, trigger.computed_attribute.attribute.name)
+        for trigger in triggers
+    }
+    for branch, kind, attribute_name in unique_nodes:
+        if event_name != BranchDeletedEvent.event_name and branch == branch_name:
             await service.workflow.submit_workflow(
                 workflow=TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
                 context=context,
                 parameters={
-                    "branch_name":
-                    "computed_attribute_name":
-                    "computed_attribute_kind":
+                    "branch_name": branch,
+                    "computed_attribute_name": attribute_name,
+                    "computed_attribute_kind": kind,
                 },
             )

@@ -319,6 +329,7 @@ async def computed_attribute_setup_jinja2(
         client=prefect_client,
         triggers=triggers,
         trigger_type=TriggerType.COMPUTED_ATTR_JINJA2,
+        force_update=False,
     )  # type: ignore[misc]

     log.info(f"{len(triggers)} Computed Attribute for Jinja2 automation configuration completed")
@@ -346,18 +357,29 @@ async def computed_attribute_setup_python(

     triggers_python, triggers_python_query = await gather_trigger_computed_attribute_python(db=db)

-
-
-
-
-
+    # Since we can have multiple trigger per NodeKind
+    # we need to extract the list of unique node that should be processed
+    # also
+    # Because the automation in Prefect doesn't capture all information about the computed attribute
+    # we can't tell right now if a given computed attribute has changed and need to be updated
+    unique_nodes: set[tuple[str, str, str]] = {
+        (
+            trigger.branch,
+            trigger.computed_attribute.computed_attribute.kind,
+            trigger.computed_attribute.computed_attribute.attribute.name,
+        )
+        for trigger in triggers_python
+    }
+    for branch, kind, attribute_name in unique_nodes:
+        if event_name != BranchDeletedEvent.event_name and branch == branch_name:
+            log.info(f"Triggering update for {kind}.{attribute_name} on {branch}")
             await service.workflow.submit_workflow(
                 workflow=TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
                 context=context,
                 parameters={
                     "branch_name": branch_name,
-                    "computed_attribute_name":
-                    "computed_attribute_kind":
+                    "computed_attribute_name": attribute_name,
+                    "computed_attribute_kind": kind,
                 },
             )

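Both setup flows now collapse their triggers to unique (branch, kind, attribute) triples before submitting workflows, so several triggers on the same computed attribute schedule only one update. A toy version of that dedup, with trigger shapes simplified to plain tuples:

```python
# Each trigger is reduced to a (branch, kind, attribute) triple; the set
# comprehension removes duplicates before one workflow is submitted per triple.
triggers = [
    ("main", "TestCar", "description"),
    ("main", "TestCar", "description"),  # duplicate trigger, same computed attribute
    ("feature-1", "TestCar", "owner_name"),
]
unique_nodes: set[tuple[str, str, str]] = {(b, k, a) for b, k, a in triggers}
for branch, kind, attribute_name in sorted(unique_nodes):
    print(f"submit update workflow for {kind}.{attribute_name} on {branch}")
# -> two submissions instead of three
```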
infrahub/config.py
CHANGED

@@ -23,7 +23,7 @@ from pydantic import (
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self

-from infrahub.database import DatabaseType
+from infrahub.constants.database import DatabaseType
 from infrahub.exceptions import InitializationError, ProcessingError

 if TYPE_CHECKING:
@@ -629,6 +629,7 @@ class AnalyticsSettings(BaseSettings):
 class ExperimentalFeaturesSettings(BaseSettings):
     model_config = SettingsConfigDict(env_prefix="INFRAHUB_EXPERIMENTAL_")
     graphql_enums: bool = False
+    value_db_index: bool = False


 class SecuritySettings(BaseSettings):

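With the `INFRAHUB_EXPERIMENTAL_` prefix shown above, pydantic-settings maps the new field to the `INFRAHUB_EXPERIMENTAL_VALUE_DB_INDEX` environment variable; `infrahub db index` consults this flag to decide whether `attr_value_index` is added (see the cli/db.py hunks above). A self-contained sketch of the mapping:

```python
import os

from pydantic_settings import BaseSettings, SettingsConfigDict


class ExperimentalFeaturesSettings(BaseSettings):
    model_config = SettingsConfigDict(env_prefix="INFRAHUB_EXPERIMENTAL_")
    graphql_enums: bool = False
    value_db_index: bool = False


# pydantic-settings derives the variable name from prefix + field name:
os.environ["INFRAHUB_EXPERIMENTAL_VALUE_DB_INDEX"] = "true"
assert ExperimentalFeaturesSettings().value_db_index is True
```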
infrahub/constants/__init__.py
File without changes

infrahub/core/branch/tasks.py
CHANGED

@@ -212,8 +212,6 @@ async def merge_branch(

     merger: BranchMerger | None = None
     async with lock.registry.global_graph_lock():
-        # await update_diff(model=RequestDiffUpdate(branch_name=obj.name))
-
         diff_repository = await component_registry.get_component(DiffRepository, db=db, branch=obj)
         diff_coordinator = await component_registry.get_component(DiffCoordinator, db=db, branch=obj)
         diff_merger = await component_registry.get_component(DiffMerger, db=db, branch=obj)

infrahub/core/constants/__init__.py
CHANGED

@@ -150,6 +150,7 @@ class ContentType(InfrahubStringEnum):
     APPLICATION_JSON = "application/json"
     APPLICATION_YAML = "application/yaml"
     APPLICATION_XML = "application/xml"
+    APPLICATION_HCL = "application/hcl"
     TEXT_PLAIN = "text/plain"
     TEXT_MARKDOWN = "text/markdown"
     TEXT_CSV = "text/csv"

infrahub/core/diff/calculator.py
CHANGED

@@ -14,6 +14,7 @@ from infrahub.core.timestamp import Timestamp
 from infrahub.database import InfrahubDatabase
 from infrahub.log import get_logger

+from .model.field_specifiers_map import NodeFieldSpecifierMap
 from .model.path import CalculatedDiffs

 log = get_logger()
@@ -26,8 +27,8 @@ class DiffCalculationRequest:
     branch_from_time: Timestamp
     from_time: Timestamp
     to_time: Timestamp
-    current_node_field_specifiers:
-    new_node_field_specifiers:
+    current_node_field_specifiers: NodeFieldSpecifierMap | None = field(default=None)
+    new_node_field_specifiers: NodeFieldSpecifierMap | None = field(default=None)


 class DiffCalculator:
@@ -75,7 +76,7 @@ class DiffCalculator:
         from_time: Timestamp,
         to_time: Timestamp,
         include_unchanged: bool = True,
-        previous_node_specifiers:
+        previous_node_specifiers: NodeFieldSpecifierMap | None = None,
     ) -> CalculatedDiffs:
         if diff_branch.name == registry.default_branch:
             diff_branch_from_time = from_time

infrahub/core/diff/combiner.py
CHANGED

@@ -384,8 +384,7 @@ class DiffCombiner:
         ):
             combined_nodes.add(
                 EnrichedDiffNode(
-
-                    kind=node_pair.later.kind,
+                    identifier=node_pair.later.identifier,
                     label=node_pair.later.label,
                     changed_at=node_pair.later.changed_at or node_pair.earlier.changed_at,
                     action=combined_action,

infrahub/core/diff/coordinator.py
CHANGED

@@ -9,6 +9,7 @@ from infrahub.core.timestamp import Timestamp
 from infrahub.exceptions import ValidationError
 from infrahub.log import get_logger

+from .model.field_specifiers_map import NodeFieldSpecifierMap
 from .model.path import (
     BranchTrackingId,
     EnrichedDiffRoot,
@@ -16,6 +17,7 @@ from .model.path import (
     EnrichedDiffs,
     EnrichedDiffsMetadata,
     NameTrackingId,
+    NodeIdentifier,
     TrackingId,
 )

@@ -43,7 +45,7 @@ class EnrichedDiffRequest:
     from_time: Timestamp
     to_time: Timestamp
     tracking_id: TrackingId
-    node_field_specifiers:
+    node_field_specifiers: NodeFieldSpecifierMap = field(default_factory=NodeFieldSpecifierMap)

     def __repr__(self) -> str:
         return (
@@ -141,7 +143,7 @@ class DiffCoordinator:
             self.lock_registry.get(name=incremental_lock_name, namespace=self.lock_namespace),
         ):
             log.info(f"Acquired lock to run branch diff update for {base_branch.name} - {diff_branch.name}")
-            enriched_diffs = await self._update_diffs(
+            enriched_diffs, node_identifiers_to_drop = await self._update_diffs(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
                 from_time=from_time,
@@ -149,7 +151,9 @@ class DiffCoordinator:
                 tracking_id=tracking_id,
                 force_branch_refresh=False,
             )
-            await self.diff_repo.save(
+            await self.diff_repo.save(
+                enriched_diffs=enriched_diffs, node_identifiers_to_drop=list(node_identifiers_to_drop)
+            )
             await self._update_core_data_checks(enriched_diff=enriched_diffs.diff_branch_diff)
             log.info(f"Branch diff update complete for {base_branch.name} - {diff_branch.name}")
             return enriched_diffs.diff_branch_diff
@@ -168,7 +172,7 @@ class DiffCoordinator:
             )
         async with self.lock_registry.get(name=general_lock_name, namespace=self.lock_namespace):
             log.info(f"Acquired lock to run arbitrary diff update for {base_branch.name} - {diff_branch.name}")
-            enriched_diffs = await self._update_diffs(
+            enriched_diffs, node_identifiers_to_drop = await self._update_diffs(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
                 from_time=from_time,
@@ -177,7 +181,9 @@ class DiffCoordinator:
                 force_branch_refresh=False,
             )

-            await self.diff_repo.save(
+            await self.diff_repo.save(
+                enriched_diffs=enriched_diffs, node_identifiers_to_drop=list(node_identifiers_to_drop)
+            )
             await self._update_core_data_checks(enriched_diff=enriched_diffs.diff_branch_diff)
             log.info(f"Arbitrary diff update complete for {base_branch.name} - {diff_branch.name}")
             return enriched_diffs.diff_branch_diff
@@ -205,7 +211,7 @@ class DiffCoordinator:
         from_time = current_branch_diff.from_time
         branched_from_time = Timestamp(diff_branch.get_branched_from())
         from_time = max(from_time, branched_from_time)
-        enriched_diffs = await self._update_diffs(
+        enriched_diffs, _ = await self._update_diffs(
             base_branch=base_branch,
             diff_branch=diff_branch,
             from_time=branched_from_time,
@@ -282,7 +288,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: Literal[True] = ...,
-    ) -> EnrichedDiffs: ...
+    ) -> tuple[EnrichedDiffs, set[NodeIdentifier]]: ...

     @overload
     async def _update_diffs(
@@ -293,7 +299,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: Literal[False] = ...,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata: ...
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]: ...

     async def _update_diffs(
         self,
@@ -303,7 +309,7 @@ class DiffCoordinator:
         to_time: Timestamp,
         tracking_id: TrackingId,
         force_branch_refresh: bool = False,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]:
         # start with empty diffs b/c we only care about their metadata for now, hydrate them with data as needed
         diff_pairs_metadata = await self.diff_repo.get_diff_pairs_metadata(
             base_branch_names=[base_branch.name],
@@ -312,7 +318,7 @@ class DiffCoordinator:
             to_time=to_time,
             tracking_id=tracking_id,
         )
-        aggregated_enriched_diffs = await self._aggregate_enriched_diffs(
+        aggregated_enriched_diffs, node_identifiers_to_drop = await self._aggregate_enriched_diffs(
             diff_request=EnrichedDiffRequest(
                 base_branch=base_branch,
                 diff_branch=diff_branch,
@@ -343,7 +349,7 @@ class DiffCoordinator:
         # this is an EnrichedDiffsMetadata, so there are no nodes to enrich
         if not isinstance(aggregated_enriched_diffs, EnrichedDiffs):
             aggregated_enriched_diffs.update_metadata(from_time=from_time, to_time=to_time, tracking_id=tracking_id)
-            return aggregated_enriched_diffs
+            return aggregated_enriched_diffs, set()

         await self.conflicts_enricher.add_conflicts_to_branch_diff(
             base_diff_root=aggregated_enriched_diffs.base_branch_diff,
@@ -353,27 +359,27 @@ class DiffCoordinator:
             enriched_diff_root=aggregated_enriched_diffs.diff_branch_diff, conflicts_only=True
         )

-        return aggregated_enriched_diffs
+        return aggregated_enriched_diffs, node_identifiers_to_drop

     @overload
     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: list[EnrichedDiffsMetadata],
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata: ...
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]: ...

     @overload
     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: None,
-    ) -> EnrichedDiffs: ...
+    ) -> tuple[EnrichedDiffs, set[NodeIdentifier]]: ...

     async def _aggregate_enriched_diffs(
         self,
         diff_request: EnrichedDiffRequest,
         partial_enriched_diffs: list[EnrichedDiffsMetadata] | None,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata, set[NodeIdentifier]]:
         """
         If return is an EnrichedDiffsMetadata, it acts as a pointer to a diff in the database that has all the
         necessary data for this diff_request. Might have a different time range and/or tracking_id
@@ -385,6 +391,7 @@ class DiffCoordinator:
             diff_request=diff_request, is_incremental_diff=False
         )

+        node_identifiers_to_drop: set[NodeIdentifier] = set()
         if partial_enriched_diffs is not None and not aggregated_enriched_diffs:
             ordered_diffs = self._get_ordered_diff_pairs(diff_pairs=partial_enriched_diffs, allow_overlap=False)
             ordered_diff_reprs = [repr(d) for d in ordered_diffs]
@@ -430,31 +437,31 @@ class DiffCoordinator:
             )
             current_time = end_time

-            aggregated_enriched_diffs = await self._concatenate_diffs_and_requests(
+            aggregated_enriched_diffs, node_identifiers_to_drop = await self._concatenate_diffs_and_requests(
                 diff_or_request_list=incremental_diffs_and_requests, full_diff_request=diff_request
             )

         # no changes during this time period, so generate an EnrichedDiffs with no nodes
         if not aggregated_enriched_diffs:
-            return self._build_enriched_diffs_with_no_nodes(diff_request=diff_request)
+            return self._build_enriched_diffs_with_no_nodes(diff_request=diff_request), node_identifiers_to_drop

         # metadata-only diff, means that a diff exists in the database that covers at least
         # part of this time period, but it might need to have its start or end time extended
         # to cover time ranges with no changes
         if not isinstance(aggregated_enriched_diffs, EnrichedDiffs):
-            return aggregated_enriched_diffs
+            return aggregated_enriched_diffs, node_identifiers_to_drop

         # a new diff (with nodes) covering the time period
         aggregated_enriched_diffs.update_metadata(
             from_time=diff_request.from_time, to_time=diff_request.to_time, tracking_id=diff_request.tracking_id
         )
-        return aggregated_enriched_diffs
+        return aggregated_enriched_diffs, node_identifiers_to_drop

     async def _concatenate_diffs_and_requests(
         self,
         diff_or_request_list: Sequence[EnrichedDiffsMetadata | EnrichedDiffRequest | None],
         full_diff_request: EnrichedDiffRequest,
-    ) -> EnrichedDiffs | EnrichedDiffsMetadata | None:
+    ) -> tuple[EnrichedDiffs | EnrichedDiffsMetadata | None, set[NodeIdentifier]]:
         """
         Returns None if diff_or_request_list is empty or all Nones
         meaning there are no changes for the diff during this time period
@@ -464,7 +471,7 @@ class DiffCoordinator:
         meaning multiple diffs (some that may have been freshly calculated) were combined
         """
         previous_diff_pair: EnrichedDiffs | EnrichedDiffsMetadata | None = None
-
+        updated_node_identifiers: set[NodeIdentifier] = set()
         for diff_or_request in diff_or_request_list:
             if isinstance(diff_or_request, EnrichedDiffRequest):
                 if previous_diff_pair:
@@ -478,8 +485,8 @@ class DiffCoordinator:
                 calculated_diff = await self._calculate_enriched_diff(
                     diff_request=diff_or_request, is_incremental_diff=is_incremental_diff
                 )
-
-
+                updated_node_identifiers |= calculated_diff.base_node_identifiers
+                updated_node_identifiers |= calculated_diff.branch_node_identifiers
                 single_enriched_diffs: EnrichedDiffs | EnrichedDiffsMetadata = calculated_diff

             elif isinstance(diff_or_request, EnrichedDiffsMetadata):
@@ -495,17 +502,22 @@ class DiffCoordinator:
                 previous_diff_pair = await self._combine_diffs(
                     earlier=previous_diff_pair,
                     later=single_enriched_diffs,
-
+                    node_identifiers=updated_node_identifiers,
                 )
                 log.info("Diffs combined.")

-
+        node_identifiers_to_drop: set[NodeIdentifier] = set()
+        if isinstance(previous_diff_pair, EnrichedDiffs):
+            # nodes that were updated and that no longer exist on this diff have been removed
+            node_identifiers_to_drop = updated_node_identifiers - previous_diff_pair.branch_node_identifiers
+
+        return previous_diff_pair, node_identifiers_to_drop

     async def _combine_diffs(
         self,
         earlier: EnrichedDiffs | EnrichedDiffsMetadata,
         later: EnrichedDiffs | EnrichedDiffsMetadata,
-
+        node_identifiers: set[NodeIdentifier],
     ) -> EnrichedDiffs | EnrichedDiffsMetadata:
         log.info(f"Earlier diff to combine: {earlier!r}")
         log.info(f"Later diff to combine: {later!r}")
@@ -522,11 +534,15 @@ class DiffCoordinator:
         # hydrate the diffs to combine, if necessary
         if not isinstance(earlier, EnrichedDiffs):
             log.info("Hydrating earlier diff...")
-            earlier = await self.diff_repo.hydrate_diff_pair(
+            earlier = await self.diff_repo.hydrate_diff_pair(
+                enriched_diffs_metadata=earlier, node_identifiers=node_identifiers
+            )
             log.info("Earlier diff hydrated.")
         if not isinstance(later, EnrichedDiffs):
             log.info("Hydrating later diff...")
-            later = await self.diff_repo.hydrate_diff_pair(
+            later = await self.diff_repo.hydrate_diff_pair(
+                enriched_diffs_metadata=later, node_identifiers=node_identifiers
+            )
             log.info("Later diff hydrated.")

         return await self.diff_combiner.combine(earlier_diffs=earlier, later_diffs=later)

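The bookkeeping above reduces to set arithmetic: identifiers touched while recalculating, minus those still present in the combined diff, are the stale entries the repository must drop on save. A toy illustration with identifiers modeled as (kind, uuid) tuples (the real `NodeIdentifier` is a model in `model/path.py`):

```python
# Identifiers seen while recalculating incremental diffs:
updated_node_identifiers = {("TestCar", "uuid-1"), ("TestCar", "uuid-2")}
# Identifiers still present after the diffs were combined:
branch_node_identifiers = {("TestCar", "uuid-1")}

# uuid-2 was updated earlier but no longer appears in the combined diff,
# so its stored diff node is stale and must be dropped when the diff is saved.
node_identifiers_to_drop = updated_node_identifiers - branch_node_identifiers
assert node_identifiers_to_drop == {("TestCar", "uuid-2")}
```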
infrahub/core/diff/data_check_synchronizer.py
CHANGED

@@ -1,6 +1,7 @@
 from enum import Enum

 from infrahub.core.constants import BranchConflictKeep, InfrahubKind
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters
 from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
 from infrahub.core.manager import NodeManager
 from infrahub.core.node import Node
@@ -74,7 +75,7 @@ class DiffDataCheckSynchronizer:
         retrieved_diff_conflicts_only = await self.diff_repository.get_one(
             diff_branch_name=enriched_diff.diff_branch_name,
             diff_id=enriched_diff.uuid,
-            filters=
+            filters=EnrichedDiffQueryFilters(only_conflicted=True),
         )
         enriched_diff_all_conflicts = retrieved_diff_conflicts_only
         # if `enriched_diff` is an EnrichedDiffRootsMetadata, then there have been no changes to the diff and
@@ -116,7 +117,7 @@ class DiffDataCheckSynchronizer:
     def _update_diff_conflicts(self, updated_diff: EnrichedDiffRoot, retrieved_diff: EnrichedDiffRoot) -> None:
         for updated_node in updated_diff.nodes:
             try:
-                retrieved_node = retrieved_diff.get_node(
+                retrieved_node = retrieved_diff.get_node(node_identifier=updated_node.identifier)
             except ValueError:
                 retrieved_node = None
             if not retrieved_node: