infrahub-server 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +208 -16
- infrahub/api/artifact.py +3 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/query.py +2 -0
- infrahub/api/schema.py +27 -3
- infrahub/auth.py +5 -5
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +160 -157
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +56 -9
- infrahub/computed_attribute/tasks.py +19 -7
- infrahub/config.py +7 -2
- infrahub/core/attribute.py +35 -24
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +9 -5
- infrahub/core/branch/needs_rebase_status.py +11 -0
- infrahub/core/branch/tasks.py +72 -10
- infrahub/core/changelog/models.py +2 -10
- infrahub/core/constants/__init__.py +4 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/convert_object_type/object_conversion.py +201 -0
- infrahub/core/convert_object_type/repository_conversion.py +89 -0
- infrahub/core/convert_object_type/schema_mapping.py +27 -3
- infrahub/core/diff/calculator.py +2 -2
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/delete_query.py +9 -5
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/diff/query/merge.py +39 -23
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +7 -4
- infrahub/core/manager.py +3 -81
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +13 -10
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
- infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
- infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
- infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
- infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +26 -5
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +66 -19
- infrahub/core/models.py +2 -2
- infrahub/core/node/__init__.py +207 -54
- infrahub/core/node/create.py +53 -49
- infrahub/core/node/lock_utils.py +124 -0
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/property.py +11 -0
- infrahub/core/protocols.py +8 -1
- infrahub/core/query/attribute.py +82 -15
- infrahub/core/query/diff.py +61 -16
- infrahub/core/query/ipam.py +16 -4
- infrahub/core/query/node.py +92 -212
- infrahub/core/query/relationship.py +44 -26
- infrahub/core/query/subquery.py +0 -8
- infrahub/core/relationship/model.py +69 -24
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -2
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/check.py +1 -1
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/core/repository.py +7 -0
- infrahub/core/schema/definitions/core/transform.py +1 -1
- infrahub/core/schema/definitions/internal.py +12 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/node_schema.py +1 -0
- infrahub/core/schema/relationship_schema.py +0 -1
- infrahub/core/schema/schema_branch.py +295 -10
- infrahub/core/schema/schema_branch_display.py +135 -0
- infrahub/core/schema/schema_branch_hfid.py +120 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +38 -12
- infrahub/generators/tasks.py +34 -16
- infrahub/git/base.py +42 -2
- infrahub/git/integrator.py +22 -14
- infrahub/git/tasks.py +52 -2
- infrahub/graphql/analyzer.py +9 -0
- infrahub/graphql/api/dependencies.py +2 -4
- infrahub/graphql/api/endpoints.py +16 -6
- infrahub/graphql/app.py +2 -4
- infrahub/graphql/initialization.py +2 -3
- infrahub/graphql/manager.py +213 -137
- infrahub/graphql/middleware.py +12 -0
- infrahub/graphql/mutations/branch.py +16 -0
- infrahub/graphql/mutations/computed_attribute.py +110 -3
- infrahub/graphql/mutations/convert_object_type.py +44 -13
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +73 -41
- infrahub/graphql/mutations/main.py +61 -178
- infrahub/graphql/mutations/profile.py +195 -0
- infrahub/graphql/mutations/proposed_change.py +8 -1
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/repository.py +22 -83
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/webhook.py +1 -1
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/registry.py +173 -0
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +8 -1
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +119 -42
- infrahub/locks/__init__.py +0 -0
- infrahub/locks/tasks.py +37 -0
- infrahub/message_bus/types.py +1 -0
- infrahub/patch/plan_writer.py +2 -2
- infrahub/permissions/constants.py +2 -0
- infrahub/profiles/__init__.py +0 -0
- infrahub/profiles/node_applier.py +101 -0
- infrahub/profiles/queries/__init__.py +0 -0
- infrahub/profiles/queries/get_profile_data.py +98 -0
- infrahub/profiles/tasks.py +63 -0
- infrahub/proposed_change/tasks.py +67 -14
- infrahub/repositories/__init__.py +0 -0
- infrahub/repositories/create_repository.py +113 -0
- infrahub/server.py +9 -1
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/tasks/registry.py +6 -4
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/webhook/models.py +1 -1
- infrahub/workers/dependencies.py +3 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +118 -3
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/models.py +17 -2
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/branch.py +17 -8
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +376 -95
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/convert_object_type.py +61 -0
- infrahub_sdk/ctl/branch.py +3 -0
- infrahub_sdk/ctl/check.py +2 -3
- infrahub_sdk/ctl/cli_commands.py +20 -12
- infrahub_sdk/ctl/config.py +8 -2
- infrahub_sdk/ctl/generator.py +6 -3
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/repository.py +39 -1
- infrahub_sdk/ctl/schema.py +40 -10
- infrahub_sdk/ctl/task.py +110 -0
- infrahub_sdk/ctl/utils.py +4 -0
- infrahub_sdk/ctl/validate.py +5 -3
- infrahub_sdk/diff.py +4 -5
- infrahub_sdk/exceptions.py +2 -0
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/node/attribute.py +2 -0
- infrahub_sdk/node/node.py +28 -20
- infrahub_sdk/node/relationship.py +1 -3
- infrahub_sdk/playback.py +1 -2
- infrahub_sdk/protocols.py +54 -6
- infrahub_sdk/pytest_plugin/plugin.py +7 -4
- infrahub_sdk/pytest_plugin/utils.py +40 -0
- infrahub_sdk/repository.py +1 -2
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +54 -6
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +118 -0
- infrahub_sdk/task/models.py +6 -4
- infrahub_sdk/timestamp.py +18 -6
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +233 -176
- infrahub_testcontainers/container.py +114 -2
- infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
- infrahub_testcontainers/docker-compose.test.yml +5 -0
- infrahub_testcontainers/models.py +2 -2
- infrahub_testcontainers/performance_test.py +4 -4
- infrahub/core/convert_object_type/conversion.py +0 -134
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub/proposed_change/tasks.py CHANGED
@@ -8,9 +8,10 @@ from pathlib import Path
 from typing import TYPE_CHECKING

 import pytest
-from infrahub_sdk.exceptions import ModuleImportError
+from infrahub_sdk.exceptions import ModuleImportError, NodeNotFoundError, URLNotFoundError
 from infrahub_sdk.node import InfrahubNode
 from infrahub_sdk.protocols import (
+    CoreArtifactDefinition,
     CoreArtifactValidator,
     CoreGeneratorDefinition,
     CoreGeneratorValidator,
@@ -44,7 +45,7 @@ from infrahub.core.diff.model.diff import DiffElementType, SchemaConflict
 from infrahub.core.diff.model.path import NodeDiffFieldSummary
 from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
 from infrahub.core.manager import NodeManager
-from infrahub.core.protocols import
+from infrahub.core.protocols import CoreDataCheck, CoreValidator
 from infrahub.core.protocols import CoreProposedChange as InternalCoreProposedChange
 from infrahub.core.timestamp import Timestamp
 from infrahub.core.validators.checks_runner import run_checks_and_update_validator
@@ -59,6 +60,8 @@ from infrahub.git.base import extract_repo_file_information
 from infrahub.git.models import TriggerRepositoryInternalChecks, TriggerRepositoryUserChecks
 from infrahub.git.repository import InfrahubRepository, get_initialized_repo
 from infrahub.git.utils import fetch_artifact_definition_targets, fetch_proposed_change_generator_definition_targets
+from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer
+from infrahub.graphql.initialization import prepare_graphql_params
 from infrahub.log import get_logger
 from infrahub.message_bus.types import (
     ProposedChangeArtifactDefinition,
@@ -308,6 +311,7 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
         populate_store=True,
         branch=model.source_branch,
     )
+
     generator_definitions = [
         ProposedChangeGeneratorDefinition(
             definition_id=generator.id,
@@ -320,8 +324,11 @@ async def run_generators(model: RequestProposedChangeRunGenerators, context: Inf
             parameters=generator.parameters.value,
             group_id=generator.targets.peer.id,
             convert_query_response=generator.convert_query_response.value,
+            execute_in_proposed_change=generator.execute_in_proposed_change.value,
+            execute_after_merge=generator.execute_after_merge.value,
         )
         for generator in generators
+        if generator.execute_in_proposed_change.value
     ]

     diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id)
@@ -525,7 +532,11 @@ async def run_proposed_change_user_tests(model: RequestProposedChangeUserTests)
     log = get_run_logger()
     client = get_client()

-
+    try:
+        proposed_change = await client.get(kind=CoreProposedChange, id=model.proposed_change)
+    except NodeNotFoundError:
+        log.warning(f"Proposed change ({model.proposed_change}) not found, skipping user tests execution")
+        return

     def _execute(
         directory: Path, repository: ProposedChangeRepository, proposed_change: InfrahubNode
@@ -664,6 +675,27 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
     repository = model.branch_diff.get_repository(repository_id=model.artifact_definition.repository_id)
     impacted_artifacts = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.ARTIFACT)

+    source_schema_branch = registry.schema.get_schema_branch(name=model.source_branch)
+    source_branch = registry.get_branch_from_registry(branch=model.source_branch)
+
+    graphql_params = await prepare_graphql_params(db=await get_database(), branch=model.source_branch)
+    query_analyzer = InfrahubGraphQLQueryAnalyzer(
+        query=model.artifact_definition.query_payload,
+        branch=source_branch,
+        schema_branch=source_schema_branch,
+        schema=graphql_params.schema,
+    )
+
+    only_has_unique_targets = query_analyzer.query_report.only_has_unique_targets
+    if not only_has_unique_targets:
+        log.warning(
+            f"Artifact definition {artifact_definition.name.value} query does not guarantee unique targets. All targets will be processed."
+        )
+
+    managed_branch = model.source_branch_sync_with_git and model.branch_diff.has_file_modifications
+    if managed_branch:
+        log.info("Source branch is synced with Git repositories with updates, all artifacts will be processed")
+
     checks = []

     for relationship in group.members.peers:
@@ -671,8 +703,9 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
         artifact_id = artifacts_by_member.get(member.id)
         if _should_render_artifact(
             artifact_id=artifact_id,
-            managed_branch=
+            managed_branch=managed_branch,
             impacted_artifacts=impacted_artifacts,
+            only_has_unique_targets=only_has_unique_targets,
         ):
             log.info(f"Trigger Artifact processing for {member.display_label}")

@@ -718,21 +751,26 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
     )


-def _should_render_artifact(
+def _should_render_artifact(
+    artifact_id: str | None,
+    managed_branch: bool,
+    impacted_artifacts: list[str],
+    only_has_unique_targets: bool,
+) -> bool:
     """Returns a boolean to indicate if an artifact should be generated or not.
     Will return true if:
     * The artifact_id wasn't set which could be that it's a new object that doesn't have a previous artifact
-    * The source
+    * The source branch is not data only which would indicate that it could contain updates in git to the transform
     * The artifact_id exists in the impacted_artifacts list
+    * The query failes the only_has_unique_targets check
     Will return false if:
     * The source branch is a data only branch and the artifact_id exists and is not in the impacted list
     """

-
-
-
-
-    return True
+    if not only_has_unique_targets or not artifact_id or managed_branch:
+        return True
+
+    return artifact_id in impacted_artifacts


 @flow(
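The gating above packs several conditions into one expression; here is a standalone sketch of the same decision table (plain Python, no Infrahub imports; names copied from the diff) with the outcomes spelled out:

def should_render_artifact(
    artifact_id: str | None,
    managed_branch: bool,
    impacted_artifacts: list[str],
    only_has_unique_targets: bool,
) -> bool:
    # Mirrors _should_render_artifact above: render everything when the query
    # cannot guarantee unique targets, when no prior artifact exists, or when
    # the branch is git-synced with file changes; otherwise re-render only the
    # artifacts impacted by the diff.
    if not only_has_unique_targets or not artifact_id or managed_branch:
        return True
    return artifact_id in impacted_artifacts


assert should_render_artifact(None, False, [], True)        # new object, no previous artifact
assert should_render_artifact("a1", True, [], True)         # git-synced branch with file changes
assert should_render_artifact("a1", False, [], False)       # non-unique targets: process all
assert should_render_artifact("a1", False, ["a1"], True)    # artifact impacted by the diff
assert not should_render_artifact("a1", False, [], True)    # untouched artifact is skipped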
@@ -762,6 +800,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
         query=model.generator_definition.query_name,
         targets=model.generator_definition.group_id,
         convert_query_response=model.generator_definition.convert_query_response,
+        execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+        execute_after_merge=model.generator_definition.execute_after_merge,
     )

     commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -788,6 +828,8 @@ async def run_generator_as_check(model: RunGeneratorAsCheckModel, context: Infra
         params=model.variables,
         generator_instance=generator_instance.id,
         convert_query_response=generator_definition.convert_query_response,
+        execute_after_merge=generator_definition.execute_after_merge,
+        execute_in_proposed_change=generator_definition.execute_in_proposed_change,
         infrahub_node=InfrahubNode,
     )
     generator._init_client.request_context = context.to_request_context()
@@ -931,7 +973,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
     requested_instances = 0
     impacted_instances = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE)

-    check_generator_run_models = []
+    check_generator_run_models: list[RunGeneratorAsCheckModel] = []
     for relationship in group.members.peers:
         member = relationship.peer
         generator_instance = instance_by_member.get(member.id)
@@ -967,6 +1009,7 @@ async def request_generator_definition_check(model: RequestGeneratorDefinitionCh
             context=context,
         )
         for check_generator_run_model in check_generator_run_models
+        if check_generator_run_model.generator_definition.execute_in_proposed_change
     ]

     await run_checks_and_update_validator(
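Both comprehensions above now gate generator runs on the new execute_in_proposed_change flag, while execute_after_merge is carried along for the post-merge path. A minimal sketch of that filtering, using a hypothetical stand-in for ProposedChangeGeneratorDefinition (field and example names are illustrative only):

from dataclasses import dataclass


@dataclass
class GeneratorDefinition:
    # Stand-in for ProposedChangeGeneratorDefinition; only the two new flags matter here.
    name: str
    execute_in_proposed_change: bool
    execute_after_merge: bool


definitions = [
    GeneratorDefinition("topology", execute_in_proposed_change=True, execute_after_merge=False),
    GeneratorDefinition("billing", execute_in_proposed_change=False, execute_after_merge=True),
]

# Same shape as the comprehensions above: only definitions opted into
# proposed-change execution run as checks; the rest wait for the merge.
in_pc = [d.name for d in definitions if d.execute_in_proposed_change]
assert in_pc == ["topology"]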
@@ -1249,6 +1292,9 @@ query GatherArtifactDefinitions {
               name {
                 value
               }
+              query {
+                value
+              }
             }
           }
           ... on CoreTransformJinja2 {
@@ -1466,6 +1512,7 @@ def _parse_artifact_definitions(definitions: list[dict]) -> list[ProposedChangeA
             query_name=definition["node"]["transformation"]["node"]["query"]["node"]["name"]["value"],
             query_id=definition["node"]["transformation"]["node"]["query"]["node"]["id"],
             query_models=definition["node"]["transformation"]["node"]["query"]["node"]["models"]["value"] or [],
+            query_payload=definition["node"]["transformation"]["node"]["query"]["node"]["query"]["value"],
             repository_id=definition["node"]["transformation"]["node"]["repository"]["node"]["id"],
             transform_kind=definition["node"]["transformation"]["node"]["__typename"],
         )
@@ -1489,8 +1536,14 @@ async def _get_proposed_change_repositories(
     destination_all = await client.execute_graphql(
         query=DESTINATION_ALLREPOSITORIES, branch_name=model.destination_branch
     )
-
-
+    try:
+        source_managed = await client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
+        source_readonly = await client.execute_graphql(
+            query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch
+        )
+    except URLNotFoundError:
+        # If the URL is not found it means that the source branch has been deleted after the proposed change was created
+        return []

     destination_all = destination_all[InfrahubKind.GENERICREPOSITORY]["edges"]
     source_all = (
infrahub/repositories/create_repository.py ADDED
@@ -0,0 +1,113 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, cast
+
+from infrahub.core.constants import RepositoryInternalStatus
+from infrahub.core.constants.infrahubkind import READONLYREPOSITORY, REPOSITORY
+from infrahub.core.protocols import CoreGenericRepository, CoreReadOnlyRepository, CoreRepository
+from infrahub.exceptions import ValidationError
+from infrahub.git.models import GitRepositoryAdd, GitRepositoryAddReadOnly
+from infrahub.log import get_logger
+from infrahub.message_bus import messages
+from infrahub.message_bus.messages.git_repository_connectivity import GitRepositoryConnectivityResponse
+from infrahub.workflows.catalogue import GIT_REPOSITORY_ADD, GIT_REPOSITORY_ADD_READ_ONLY
+
+if TYPE_CHECKING:
+    from infrahub.auth import AccountSession
+    from infrahub.context import InfrahubContext
+    from infrahub.core.branch import Branch
+    from infrahub.database import InfrahubDatabase
+    from infrahub.services import InfrahubServices
+
+log = get_logger()
+
+
+class RepositoryFinalizer:
+    def __init__(
+        self,
+        account_session: AccountSession,
+        services: InfrahubServices,
+        context: InfrahubContext,
+    ) -> None:
+        self.account_session = account_session
+        self.services = services
+        self.context = context
+
+    async def post_create(
+        self,
+        obj: CoreGenericRepository,
+        branch: Branch,
+        db: InfrahubDatabase,
+        delete_on_connectivity_failure: bool = True,
+    ) -> None:
+        """
+        Method meant to be called after a repository has been created in the database.
+        It mainly checks the connectivity to the remote repository and submit the workflow to create the repository in the local filesystem.
+        """
+
+        # If the connectivity is not good, we remove the repository to allow the user to add a new one
+        if delete_on_connectivity_failure:
+            message = messages.GitRepositoryConnectivity(
+                repository_name=obj.name.value,
+                repository_location=obj.location.value,
+            )
+            response = await self.services.message_bus.rpc(
+                message=message, response_class=GitRepositoryConnectivityResponse
+            )
+
+            if response.data.success is False:
+                await obj.delete(db=db)
+                raise ValidationError(response.data.message)
+
+        # If we are in the default branch, we set the sync status to Active
+        # If we are in another branch, we set the sync status to Staging
+        if branch.is_default:
+            obj.internal_status.value = RepositoryInternalStatus.ACTIVE.value
+        else:
+            obj.internal_status.value = RepositoryInternalStatus.STAGING.value
+        await obj.save(db=db)
+
+        # Create the new repository in the filesystem.
+        log.info("create_repository", name=obj.name.value)
+        authenticated_user = None
+        if self.account_session and self.account_session.authenticated:
+            authenticated_user = self.account_session.account_id
+
+        if obj.get_kind() == READONLYREPOSITORY:
+            obj = cast(CoreReadOnlyRepository, obj)
+            model = GitRepositoryAddReadOnly(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                ref=obj.ref.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD_READ_ONLY,
+                context=self.context,
+                parameters={"model": model},
+            )
+
+        elif obj.get_kind() == REPOSITORY:
+            obj = cast(CoreRepository, obj)
+            git_repo_add_model = GitRepositoryAdd(
+                repository_id=obj.id,
+                repository_name=obj.name.value,
+                location=obj.location.value,
+                default_branch_name=obj.default_branch.value,
+                infrahub_branch_name=branch.name,
+                infrahub_branch_id=str(branch.get_uuid()),
+                internal_status=obj.internal_status.value,
+                created_by=authenticated_user,
+            )
+
+            await self.services.workflow.submit_workflow(
+                workflow=GIT_REPOSITORY_ADD,
+                context=self.context,
+                parameters={"model": git_repo_add_model},
+            )
+        else:
+            raise ValueError(f"Unknown repository kind: {obj.get_kind()}")
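A possible wiring for the new class, as a sketch: the module path comes from the file list above, while the helper name and its untyped parameters are hypothetical and stand in for whatever code creates the repository node.

from infrahub.repositories.create_repository import RepositoryFinalizer


async def finalize_new_repository(account_session, services, context, repo, branch, db) -> None:
    # Hypothetical helper: run right after the repository node is saved, so a
    # failed connectivity check can still delete it and raise a ValidationError.
    finalizer = RepositoryFinalizer(account_session=account_session, services=services, context=context)
    await finalizer.post_create(obj=repo, branch=branch, db=db, delete_on_connectivity_failure=True)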
infrahub/server.py CHANGED
@@ -24,6 +24,7 @@ from infrahub.api.exception_handlers import generic_api_exception_handler
 from infrahub.components import ComponentType
 from infrahub.constants.environment import INSTALLATION_TYPE
 from infrahub.core.initialization import initialization
+from infrahub.database.graph import validate_graph_version
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.exceptions import Error, ValidationError
 from infrahub.graphql.api.endpoints import router as graphql_router
@@ -83,10 +84,17 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True
     initialize_lock(service=service)
     # We must initialize DB after initialize lock and initialize lock depends on cache initialization
     async with application.state.db.start_session() as db:
-        await initialization(db=db, add_database_indexes=True)
+        is_initial_setup = await initialization(db=db, add_database_indexes=True)
+
+    async with database.start_session() as dbs:
+        await validate_graph_version(db=dbs)
+
+    # Initialize the workflow after the registry has been setup
+    await service.initialize_workflow(is_initial_setup=is_initial_setup)

     application.state.service = service
     application.state.response_delay = config.SETTINGS.miscellaneous.response_delay
+
     if enable_scheduler:
         await service.scheduler.start_schedule()
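Condensed, the new startup sequence looks roughly like this (a sketch; the untyped service and db parameters stand in for the objects built in app_initialization):

from infrahub.core.initialization import initialization
from infrahub.database.graph import validate_graph_version


async def startup(service, db) -> None:
    # initialization() now reports whether this is a first boot, the graph
    # version is validated before serving, and the flag gates the one-time
    # task-manager identifier setup performed by initialize_workflow().
    is_initial_setup = await initialization(db=db, add_database_indexes=True)
    await validate_graph_version(db=db)
    await service.initialize_workflow(is_initial_setup=is_initial_setup)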
infrahub/services/__init__.py CHANGED
@@ -110,14 +110,17 @@ class InfrahubServices:
         # This circular dependency could be removed if InfrahubScheduler only depends on what it needs.
         scheduler.service = service

-
-
+        return service
+
+    async def initialize_workflow(self, is_initial_setup: bool = False) -> None:
+        if self.workflow is not None and isinstance(self.workflow, WorkflowWorkerExecution):
+            assert self.component is not None
             # Ideally `WorkflowWorkerExecution.initialize` would be directly part of WorkflowWorkerExecution
             # constructor but this requires some redesign as it depends on InfrahubComponent which is instantiated
             # after workflow instantiation.
-            await
-
-
+            await self.component.refresh_heartbeat()
+            is_primary = await self.component.is_primary_gunicorn_worker()
+            await self.workflow.initialize(component_is_primary_server=is_primary, is_initial_setup=is_initial_setup)

     @property
     def component(self) -> InfrahubComponent:
infrahub/services/adapters/http/__init__.py CHANGED
@@ -3,10 +3,15 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any

 if TYPE_CHECKING:
+    import ssl
+
     import httpx


 class InfrahubHTTP:
+    def verify_tls(self, verify: bool | None = None) -> bool | ssl.SSLContext:
+        raise NotImplementedError()
+
     async def get(
         self,
         url: str,
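Concrete adapters are expected to override the new hook. A hypothetical subclass returning a pinned-CA SSLContext (the import path is inferred from the diff header above; the CA bundle path is made up for illustration):

import ssl

from infrahub.services.adapters.http import InfrahubHTTP


class PinnedCAHTTP(InfrahubHTTP):
    # Hypothetical adapter: honor an explicit verify flag when given,
    # otherwise verify against a custom CA bundle.
    def verify_tls(self, verify: bool | None = None) -> bool | ssl.SSLContext:
        if verify is not None:
            return verify
        return ssl.create_default_context(cafile="/etc/infrahub/ca.pem")  # assumed path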
infrahub/services/adapters/workflow/worker.py CHANGED
@@ -3,10 +3,12 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any, overload

 from prefect.client.schemas.objects import StateType
+from prefect.context import AsyncClientContext
 from prefect.deployments import run_deployment

+from infrahub.services.adapters.http.httpx import HttpxAdapter
 from infrahub.workers.utils import inject_context_parameter
-from infrahub.workflows.initialization import setup_task_manager
+from infrahub.workflows.initialization import setup_task_manager, setup_task_manager_identifiers
 from infrahub.workflows.models import WorkflowInfo

 from . import InfrahubWorkflow, Return
@@ -19,11 +21,19 @@ if TYPE_CHECKING:


 class WorkflowWorkerExecution(InfrahubWorkflow):
+    # This is required to grab a cached SSLContext from the HttpAdapter.
+    # We cannot use the get_http() dependency since it introduces a circular dependency.
+    # We could remove this later on by introducing a cached SSLContext outside of this adapter.
+    _http_adapter = HttpxAdapter()
+
     @staticmethod
-    async def initialize(component_is_primary_server: bool) -> None:
+    async def initialize(component_is_primary_server: bool, is_initial_setup: bool = False) -> None:
         if component_is_primary_server:
             await setup_task_manager()

+        if is_initial_setup:
+            await setup_task_manager_identifiers()
+
     @overload
     async def execute_workflow(
         self,
@@ -79,5 +89,6 @@ class WorkflowWorkerExecution(InfrahubWorkflow):
         parameters = dict(parameters) if parameters is not None else {}
         inject_context_parameter(func=flow_func, parameters=parameters, context=context)

-
+        async with AsyncClientContext(httpx_settings={"verify": self._http_adapter.verify_tls()}):
+            flow_run = await run_deployment(name=workflow.full_name, timeout=0, parameters=parameters or {}, tags=tags)  # type: ignore[return-value, misc]
         return WorkflowInfo.from_flow(flow_run=flow_run)
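The same pattern in isolation: Prefect API calls issued inside AsyncClientContext reuse an httpx client built with the supplied verify setting, which is exactly how execute_workflow above propagates the adapter's TLS configuration. A minimal sketch (deployment name left to the caller):

import ssl

from prefect.context import AsyncClientContext
from prefect.deployments import run_deployment


async def trigger(name: str, verify: bool | ssl.SSLContext = True):
    # Every Prefect call made inside this context uses an httpx client
    # created with the given TLS verification setting (bool or SSLContext).
    async with AsyncClientContext(httpx_settings={"verify": verify}):
        return await run_deployment(name=name, timeout=0)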
infrahub/task_manager/event.py CHANGED
@@ -160,6 +160,9 @@ class PrefectEventData(PrefectEventModel):
     def _return_branch_rebased(self) -> dict[str, Any]:
         return {"rebased_branch": self._get_branch_name_from_resource()}

+    def _return_branch_migrated(self) -> dict[str, Any]:
+        return {"migrated_branch": self._get_branch_name_from_resource()}
+
     def _return_group_event(self) -> dict[str, Any]:
         members = []
         ancestors = []
@@ -228,6 +231,8 @@ class PrefectEventData(PrefectEventModel):
                 event_specifics = self._return_branch_deleted()
             case "infrahub.branch.merged":
                 event_specifics = self._return_branch_merged()
+            case "infrahub.branch.migrated":
+                event_specifics = self._return_branch_migrated()
             case "infrahub.branch.rebased":
                 event_specifics = self._return_branch_rebased()
             case "infrahub.group.member_added" | "infrahub.group.member_removed":
infrahub/task_manager/models.py CHANGED
@@ -141,6 +141,13 @@ class InfrahubEventFilter(EventFilter):
         if branches:
             self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))

+        if branch_migrated := event_type_filter.get("branch_migrated"):
+            branches = branch_migrated.get("branches") or []
+            if "infrahub.branch.created" not in event_type:
+                event_type.append("infrahub.branch.migrated")
+            if branches:
+                self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))
+
         if branch_rebased := event_type_filter.get("branch_rebased"):
             branches = branch_rebased.get("branches") or []
             if "infrahub.branch.created" not in event_type:
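A standalone sketch of the new filter handling (logic copied from the block above; the payload shape is assumed from how event_type_filter is read):

def expand_branch_migrated(event_type_filter: dict, event_type: list[str]) -> tuple[list[str], list[str]]:
    # Mirrors the block above: enable the new migrated event type and collect
    # any branch names used to scope the resource filter on infrahub.branch.name.
    branches: list[str] = []
    if branch_migrated := event_type_filter.get("branch_migrated"):
        branches = branch_migrated.get("branches") or []
        if "infrahub.branch.created" not in event_type:
            event_type.append("infrahub.branch.migrated")
    return event_type, branches


types, branches = expand_branch_migrated({"branch_migrated": {"branches": ["main"]}}, [])
assert types == ["infrahub.branch.migrated"] and branches == ["main"]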
infrahub/task_manager/task.py CHANGED
@@ -1,7 +1,10 @@
+import asyncio
 import uuid
+from datetime import datetime, timedelta, timezone
 from typing import Any
 from uuid import UUID

+from prefect import State
 from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.filters import (
     ArtifactFilter,
@@ -12,6 +15,7 @@ from prefect.client.schemas.filters import (
     FlowRunFilter,
     FlowRunFilterId,
     FlowRunFilterName,
+    FlowRunFilterStartTime,
     FlowRunFilterState,
     FlowRunFilterStateType,
     FlowRunFilterTags,
@@ -311,3 +315,72 @@ class PrefectTask:
         )

         return {"count": count or 0, "edges": nodes}
+
+    @classmethod
+    async def delete_flow_runs(
+        cls,
+        states: list[StateType] = [StateType.COMPLETED, StateType.FAILED, StateType.CANCELLED],  # noqa: B006
+        delete: bool = True,
+        days_to_keep: int = 2,
+        batch_size: int = 100,
+    ) -> None:
+        """Delete flow runs in the specified states and older than specified days."""
+
+        logger = get_logger()
+
+        async with get_client(sync_client=False) as client:
+            cutoff = datetime.now(timezone.utc) - timedelta(days=days_to_keep)
+
+            flow_run_filter = FlowRunFilter(
+                start_time=FlowRunFilterStartTime(before_=cutoff),  # type: ignore[arg-type]
+                state=FlowRunFilterState(type=FlowRunFilterStateType(any_=states)),
+            )
+
+            # Get flow runs to delete
+            flow_runs = await client.read_flow_runs(flow_run_filter=flow_run_filter, limit=batch_size)
+
+            deleted_total = 0
+
+            while True:
+                batch_deleted = 0
+                failed_deletes = []
+
+                # Delete each flow run through the API
+                for flow_run in flow_runs:
+                    try:
+                        if delete:
+                            await client.delete_flow_run(flow_run_id=flow_run.id)
+                        else:
+                            await client.set_flow_run_state(
+                                flow_run_id=flow_run.id,
+                                state=State(type=StateType.CRASHED),
+                                force=True,
+                            )
+                        deleted_total += 1
+                        batch_deleted += 1
+                    except Exception as e:
+                        logger.warning(f"Failed to delete flow run {flow_run.id}: {e}")
+                        failed_deletes.append(flow_run.id)
+
+                    # Rate limiting
+                    if batch_deleted % 10 == 0:
+                        await asyncio.sleep(0.5)
+
+                logger.info(f"Delete {batch_deleted}/{len(flow_runs)} flow runs (total: {deleted_total})")
+
+                # Get next batch
+                previous_flow_run_ids = [fr.id for fr in flow_runs]
+                flow_runs = await client.read_flow_runs(flow_run_filter=flow_run_filter, limit=batch_size)
+
+                if not flow_runs:
+                    logger.info("No more flow runs to delete")
+                    break
+
+                if previous_flow_run_ids == [fr.id for fr in flow_runs]:
+                    logger.info("Found same flow runs to delete, aborting")
+                    break
+
+                # Delay between batches to avoid overwhelming the API
+                await asyncio.sleep(1.0)
+
+            logger.info(f"Retention complete. Total deleted tasks: {deleted_total}")
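A usage sketch, assuming a reachable Prefect API; with delete=False matching runs are force-marked CRASHED instead of deleted:

import asyncio

from prefect.client.schemas.objects import StateType

from infrahub.task_manager.task import PrefectTask

# Remove completed flow runs older than a week, 100 per batch.
asyncio.run(PrefectTask.delete_flow_runs(states=[StateType.COMPLETED], days_to_keep=7, batch_size=100))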
infrahub/tasks/registry.py CHANGED
@@ -5,6 +5,7 @@ from typing import TYPE_CHECKING
 from infrahub import lock
 from infrahub.core import registry
 from infrahub.core.constants import GLOBAL_BRANCH_NAME
+from infrahub.graphql.registry import registry as graphql_registry
 from infrahub.log import get_logger
 from infrahub.worker import WORKER_IDENTITY

@@ -20,9 +21,8 @@ def update_graphql_schema(branch: Branch, schema_branch: SchemaBranch) -> None:
     """
     Update the GraphQL schema for the given branch.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager

-    gqlm =
+    gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
     gqlm.get_graphql_schema(
         include_query=True,
         include_mutation=True,
@@ -67,6 +67,9 @@ async def update_branch_registry(db: InfrahubDatabase, branch: Branch) -> None:
             worker=WORKER_IDENTITY,
         )
         registry.branch[branch.name] = branch
+    elif existing_branch.status != branch.status:
+        log.info(f"Updating registry branch cache for {branch.name=}")
+        registry.branch[branch.name] = branch
         return

     log.info(
@@ -89,7 +92,6 @@ async def refresh_branches(db: InfrahubDatabase) -> None:
     If a branch is already present with a different value for the hash
     We pull the new schema from the database and we update the registry.
     """
-    from infrahub.graphql.manager import GraphQLSchemaManager

     async with lock.registry.local_schema_lock():
         active_branches = await registry.branch_object.get_list(db=db)
@@ -106,7 +108,7 @@ async def refresh_branches(db: InfrahubDatabase) -> None:

         purged_branches = await registry.purge_inactive_branches(db=db, active_branches=active_branches)
         purged_branches.update(
-
+            graphql_registry.purge_inactive(active_branches=[branch.name for branch in active_branches])
         )
         for branch_name in sorted(purged_branches):
             log.info(f"Removed branch {branch_name!r} from the registry", branch=branch_name, worker=WORKER_IDENTITY)
infrahub/trigger/catalogue.py CHANGED
@@ -4,6 +4,8 @@ from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
 )
+from infrahub.display_labels.triggers import TRIGGER_DISPLAY_LABELS_ALL_SCHEMA
+from infrahub.hfid.triggers import TRIGGER_HFID_ALL_SCHEMA
 from infrahub.schema.triggers import TRIGGER_SCHEMA_UPDATED
 from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
@@ -13,6 +15,8 @@ builtin_triggers: list[TriggerDefinition] = [
     TRIGGER_BRANCH_MERGED,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
+    TRIGGER_DISPLAY_LABELS_ALL_SCHEMA,
+    TRIGGER_HFID_ALL_SCHEMA,
     TRIGGER_SCHEMA_UPDATED,
     TRIGGER_WEBHOOK_DELETE,
     TRIGGER_WEBHOOK_SETUP_UPDATE,
infrahub/trigger/models.py CHANGED
@@ -37,6 +37,8 @@ class TriggerType(str, Enum):
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
     COMPUTED_ATTR_PYTHON = "computed_attr_python"
     COMPUTED_ATTR_PYTHON_QUERY = "computed_attr_python_query"
+    DISPLAY_LABEL_JINJA2 = "display_label_jinja2"
+    HUMAN_FRIENDLY_ID = "human_friendly_id"
     # OBJECT = "object"
