infrahub-server 1.2.11__py3-none-any.whl → 1.3.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (147)
  1. infrahub/actions/constants.py +86 -0
  2. infrahub/actions/gather.py +114 -0
  3. infrahub/actions/models.py +241 -0
  4. infrahub/actions/parsers.py +104 -0
  5. infrahub/actions/schema.py +382 -0
  6. infrahub/actions/tasks.py +126 -0
  7. infrahub/actions/triggers.py +21 -0
  8. infrahub/cli/db.py +1 -2
  9. infrahub/core/account.py +24 -47
  10. infrahub/core/attribute.py +13 -15
  11. infrahub/core/constants/__init__.py +5 -0
  12. infrahub/core/constants/infrahubkind.py +9 -0
  13. infrahub/core/convert_object_type/__init__.py +0 -0
  14. infrahub/core/convert_object_type/conversion.py +122 -0
  15. infrahub/core/convert_object_type/schema_mapping.py +56 -0
  16. infrahub/core/diff/query/all_conflicts.py +1 -5
  17. infrahub/core/diff/query/artifact.py +10 -20
  18. infrahub/core/diff/query/diff_get.py +3 -6
  19. infrahub/core/diff/query/field_summary.py +2 -4
  20. infrahub/core/diff/query/merge.py +70 -123
  21. infrahub/core/diff/query/save.py +20 -32
  22. infrahub/core/diff/query/summary_counts_enricher.py +34 -54
  23. infrahub/core/manager.py +14 -11
  24. infrahub/core/migrations/graph/m003_relationship_parent_optional.py +1 -2
  25. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +2 -4
  26. infrahub/core/migrations/graph/m019_restore_rels_to_time.py +11 -22
  27. infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -6
  28. infrahub/core/migrations/graph/m021_missing_hierarchy_merge.py +1 -2
  29. infrahub/core/migrations/graph/m024_missing_hierarchy_backfill.py +1 -2
  30. infrahub/core/migrations/query/attribute_add.py +1 -2
  31. infrahub/core/migrations/query/attribute_rename.py +5 -10
  32. infrahub/core/migrations/query/delete_element_in_schema.py +19 -17
  33. infrahub/core/migrations/query/node_duplicate.py +19 -21
  34. infrahub/core/migrations/query/relationship_duplicate.py +19 -17
  35. infrahub/core/migrations/schema/node_attribute_remove.py +4 -8
  36. infrahub/core/migrations/schema/node_remove.py +19 -19
  37. infrahub/core/models.py +29 -2
  38. infrahub/core/node/__init__.py +90 -18
  39. infrahub/core/node/create.py +211 -0
  40. infrahub/core/node/resource_manager/number_pool.py +31 -5
  41. infrahub/core/node/standard.py +6 -1
  42. infrahub/core/protocols.py +56 -0
  43. infrahub/core/protocols_base.py +3 -0
  44. infrahub/core/query/__init__.py +2 -2
  45. infrahub/core/query/diff.py +19 -32
  46. infrahub/core/query/ipam.py +10 -20
  47. infrahub/core/query/node.py +28 -46
  48. infrahub/core/query/relationship.py +53 -32
  49. infrahub/core/query/resource_manager.py +1 -2
  50. infrahub/core/query/subquery.py +2 -4
  51. infrahub/core/relationship/model.py +3 -0
  52. infrahub/core/schema/__init__.py +2 -1
  53. infrahub/core/schema/attribute_parameters.py +160 -0
  54. infrahub/core/schema/attribute_schema.py +111 -8
  55. infrahub/core/schema/basenode_schema.py +25 -1
  56. infrahub/core/schema/definitions/core/__init__.py +29 -1
  57. infrahub/core/schema/definitions/core/group.py +45 -0
  58. infrahub/core/schema/definitions/internal.py +27 -4
  59. infrahub/core/schema/generated/attribute_schema.py +16 -3
  60. infrahub/core/schema/manager.py +3 -0
  61. infrahub/core/schema/schema_branch.py +67 -7
  62. infrahub/core/validators/__init__.py +13 -1
  63. infrahub/core/validators/attribute/choices.py +1 -3
  64. infrahub/core/validators/attribute/enum.py +1 -3
  65. infrahub/core/validators/attribute/kind.py +1 -3
  66. infrahub/core/validators/attribute/length.py +13 -7
  67. infrahub/core/validators/attribute/min_max.py +118 -0
  68. infrahub/core/validators/attribute/number_pool.py +106 -0
  69. infrahub/core/validators/attribute/optional.py +1 -4
  70. infrahub/core/validators/attribute/regex.py +5 -6
  71. infrahub/core/validators/attribute/unique.py +1 -3
  72. infrahub/core/validators/determiner.py +18 -2
  73. infrahub/core/validators/enum.py +12 -0
  74. infrahub/core/validators/node/hierarchy.py +3 -6
  75. infrahub/core/validators/query.py +1 -3
  76. infrahub/core/validators/relationship/count.py +6 -12
  77. infrahub/core/validators/relationship/optional.py +2 -4
  78. infrahub/core/validators/relationship/peer.py +3 -8
  79. infrahub/core/validators/uniqueness/query.py +5 -9
  80. infrahub/database/__init__.py +11 -2
  81. infrahub/events/group_action.py +1 -0
  82. infrahub/git/base.py +5 -3
  83. infrahub/git/integrator.py +102 -3
  84. infrahub/graphql/analyzer.py +139 -18
  85. infrahub/graphql/manager.py +4 -0
  86. infrahub/graphql/mutations/action.py +164 -0
  87. infrahub/graphql/mutations/convert_object_type.py +62 -0
  88. infrahub/graphql/mutations/main.py +24 -175
  89. infrahub/graphql/mutations/proposed_change.py +20 -17
  90. infrahub/graphql/mutations/resource_manager.py +62 -6
  91. infrahub/graphql/queries/convert_object_type_mapping.py +36 -0
  92. infrahub/graphql/queries/resource_manager.py +7 -1
  93. infrahub/graphql/schema.py +6 -0
  94. infrahub/menu/menu.py +31 -0
  95. infrahub/message_bus/messages/__init__.py +0 -10
  96. infrahub/message_bus/operations/__init__.py +0 -8
  97. infrahub/patch/queries/consolidate_duplicated_nodes.py +3 -6
  98. infrahub/patch/queries/delete_duplicated_edges.py +5 -10
  99. infrahub/pools/number.py +5 -3
  100. infrahub/prefect_server/models.py +1 -19
  101. infrahub/proposed_change/models.py +68 -3
  102. infrahub/proposed_change/tasks.py +907 -30
  103. infrahub/task_manager/models.py +10 -6
  104. infrahub/trigger/catalogue.py +2 -0
  105. infrahub/trigger/models.py +18 -2
  106. infrahub/trigger/tasks.py +3 -1
  107. infrahub/types.py +6 -0
  108. infrahub/workflows/catalogue.py +76 -0
  109. infrahub_sdk/client.py +43 -10
  110. infrahub_sdk/node/__init__.py +39 -0
  111. infrahub_sdk/node/attribute.py +122 -0
  112. infrahub_sdk/node/constants.py +21 -0
  113. infrahub_sdk/{node.py → node/node.py} +50 -749
  114. infrahub_sdk/node/parsers.py +15 -0
  115. infrahub_sdk/node/property.py +24 -0
  116. infrahub_sdk/node/related_node.py +266 -0
  117. infrahub_sdk/node/relationship.py +302 -0
  118. infrahub_sdk/protocols.py +112 -0
  119. infrahub_sdk/protocols_base.py +34 -2
  120. infrahub_sdk/query_groups.py +13 -2
  121. infrahub_sdk/schema/main.py +1 -0
  122. infrahub_sdk/schema/repository.py +16 -0
  123. infrahub_sdk/spec/object.py +1 -1
  124. infrahub_sdk/store.py +1 -1
  125. infrahub_sdk/testing/schemas/car_person.py +1 -0
  126. {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/METADATA +4 -4
  127. {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/RECORD +134 -122
  128. {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/WHEEL +1 -1
  129. infrahub_testcontainers/container.py +0 -1
  130. infrahub_testcontainers/docker-compose.test.yml +1 -1
  131. infrahub_testcontainers/helpers.py +8 -2
  132. infrahub/message_bus/messages/check_generator_run.py +0 -26
  133. infrahub/message_bus/messages/finalize_validator_execution.py +0 -15
  134. infrahub/message_bus/messages/proposed_change/base_with_diff.py +0 -16
  135. infrahub/message_bus/messages/proposed_change/request_proposedchange_refreshartifacts.py +0 -11
  136. infrahub/message_bus/messages/request_generatordefinition_check.py +0 -20
  137. infrahub/message_bus/messages/request_proposedchange_pipeline.py +0 -23
  138. infrahub/message_bus/operations/check/__init__.py +0 -3
  139. infrahub/message_bus/operations/check/generator.py +0 -156
  140. infrahub/message_bus/operations/finalize/__init__.py +0 -3
  141. infrahub/message_bus/operations/finalize/validator.py +0 -133
  142. infrahub/message_bus/operations/requests/__init__.py +0 -9
  143. infrahub/message_bus/operations/requests/generator_definition.py +0 -140
  144. infrahub/message_bus/operations/requests/proposed_change.py +0 -629
  145. /infrahub/{message_bus/messages/proposed_change → actions}/__init__.py +0 -0
  146. {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/LICENSE.txt +0 -0
  147. {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/entry_points.txt +0 -0
infrahub/proposed_change/tasks.py
@@ -3,11 +3,20 @@ from __future__ import annotations
  import asyncio
  import os
  import sys
+ from enum import IntFlag
  from pathlib import Path
  from typing import TYPE_CHECKING

  import pytest
- from infrahub_sdk.protocols import CoreArtifactValidator, CoreGeneratorDefinition, CoreProposedChange
+ from infrahub_sdk.exceptions import ModuleImportError
+ from infrahub_sdk.node import InfrahubNode
+ from infrahub_sdk.protocols import (
+     CoreArtifactValidator,
+     CoreGeneratorDefinition,
+     CoreGeneratorValidator,
+     CoreProposedChange,
+ )
+ from infrahub_sdk.schema.repository import InfrahubGeneratorDefinitionConfig
  from prefect import flow, task
  from prefect.cache_policies import NONE
  from prefect.client.schemas.objects import (
@@ -15,20 +24,28 @@ from prefect.client.schemas.objects import (
  )
  from prefect.logging import get_run_logger
  from prefect.states import Completed, Failed
+ from pydantic import BaseModel

- from infrahub import config
+ from infrahub import config, lock
  from infrahub.artifacts.models import CheckArtifactCreate
  from infrahub.context import InfrahubContext # noqa: TC001 needed for prefect flow
  from infrahub.core import registry
  from infrahub.core.branch import Branch
  from infrahub.core.branch.tasks import merge_branch
- from infrahub.core.constants import InfrahubKind, RepositoryInternalStatus, ValidatorConclusion
+ from infrahub.core.constants import (
+     CheckType,
+     GeneratorInstanceStatus,
+     InfrahubKind,
+     RepositoryInternalStatus,
+     ValidatorConclusion,
+ )
  from infrahub.core.diff.coordinator import DiffCoordinator
  from infrahub.core.diff.model.diff import DiffElementType, SchemaConflict
  from infrahub.core.diff.model.path import NodeDiffFieldSummary
  from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
  from infrahub.core.protocols import CoreDataCheck, CoreValidator
  from infrahub.core.protocols import CoreProposedChange as InternalCoreProposedChange
+ from infrahub.core.timestamp import Timestamp
  from infrahub.core.validators.checks_runner import run_checks_and_update_validator
  from infrahub.core.validators.determiner import ConstraintValidatorDeterminer
  from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
@@ -36,19 +53,34 @@ from infrahub.core.validators.tasks import schema_validate_migrations
  from infrahub.dependencies.registry import get_component_registry
  from infrahub.exceptions import MergeFailedError
  from infrahub.generators.models import ProposedChangeGeneratorDefinition
+ from infrahub.git.base import extract_repo_file_information
  from infrahub.git.models import TriggerRepositoryInternalChecks, TriggerRepositoryUserChecks
- from infrahub.git.repository import get_initialized_repo
+ from infrahub.git.repository import InfrahubRepository, get_initialized_repo
  from infrahub.log import get_logger
- from infrahub.message_bus import InfrahubMessage, messages
- from infrahub.message_bus.operations.requests.proposed_change import DefinitionSelect
+ from infrahub.message_bus.types import (
+     ProposedChangeArtifactDefinition,
+     ProposedChangeBranchDiff,
+     ProposedChangeRepository,
+     ProposedChangeSubscriber,
+ )
+ from infrahub.proposed_change.branch_diff import (
+     get_modified_node_ids,
+     has_data_changes,
+     has_node_changes,
+     set_diff_summary_cache,
+ )
  from infrahub.proposed_change.constants import ProposedChangeState
  from infrahub.proposed_change.models import (
      RequestArtifactDefinitionCheck,
+     RequestGeneratorDefinitionCheck,
      RequestProposedChangeDataIntegrity,
+     RequestProposedChangePipeline,
+     RequestProposedChangeRefreshArtifacts,
      RequestProposedChangeRepositoryChecks,
      RequestProposedChangeRunGenerators,
      RequestProposedChangeSchemaIntegrity,
      RequestProposedChangeUserTests,
+     RunGeneratorAsCheckModel,
  )
  from infrahub.pytest_plugin import InfrahubBackendPlugin
  from infrahub.services import InfrahubServices # noqa: TC001 needed for prefect flow
@@ -57,7 +89,15 @@ from infrahub.workflows.catalogue import (
      GIT_REPOSITORIES_CHECK_ARTIFACT_CREATE,
      GIT_REPOSITORY_INTERNAL_CHECKS_TRIGGER,
      GIT_REPOSITORY_USER_CHECKS_TRIGGER,
+     REQUEST_ARTIFACT_DEFINITION_CHECK,
+     REQUEST_GENERATOR_DEFINITION_CHECK,
+     REQUEST_PROPOSED_CHANGE_DATA_INTEGRITY,
+     REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS,
      REQUEST_PROPOSED_CHANGE_REPOSITORY_CHECKS,
+     REQUEST_PROPOSED_CHANGE_RUN_GENERATORS,
+     REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,
+     REQUEST_PROPOSED_CHANGE_USER_TESTS,
+     RUN_GENERATOR_AS_CHECK,
  )
  from infrahub.workflows.utils import add_tags

@@ -65,11 +105,9 @@ from .branch_diff import get_diff_summary_cache, get_modified_kinds

  if TYPE_CHECKING:
      from infrahub_sdk.diff import NodeDiff
-     from infrahub_sdk.node import InfrahubNode

      from infrahub.core.models import SchemaUpdateConstraintInfo
      from infrahub.core.schema.schema_branch import SchemaBranch
-     from infrahub.message_bus.types import ProposedChangeRepository


  async def _proposed_change_transition_state(
@@ -281,8 +319,7 @@ async def run_generators(
              )

          if select:
-             msg = messages.RequestGeneratorDefinitionCheck(
-                 context=context,
+             request_generator_def_check_model = RequestGeneratorDefinitionCheck(
                  generator_definition=generator_definition,
                  branch_diff=model.branch_diff,
                  proposed_change=model.proposed_change,
@@ -290,20 +327,24 @@ async def run_generators(
                  source_branch_sync_with_git=model.source_branch_sync_with_git,
                  destination_branch=model.destination_branch,
              )
-             msg.assign_meta(parent=model)
-             await service.message_bus.send(message=msg)
-
-     next_messages: list[InfrahubMessage] = []
-     if model.refresh_artifacts:
-         next_messages.append(
-             messages.RequestProposedChangeRefreshArtifacts(
+             await service.workflow.submit_workflow(
+                 workflow=REQUEST_GENERATOR_DEFINITION_CHECK,
+                 parameters={"model": request_generator_def_check_model},
                  context=context,
-                 proposed_change=model.proposed_change,
-                 source_branch=model.source_branch,
-                 source_branch_sync_with_git=model.source_branch_sync_with_git,
-                 destination_branch=model.destination_branch,
-                 branch_diff=model.branch_diff,
              )
+
+     if model.refresh_artifacts:
+         request_refresh_artifact_model = RequestProposedChangeRefreshArtifacts(
+             proposed_change=model.proposed_change,
+             source_branch=model.source_branch,
+             source_branch_sync_with_git=model.source_branch_sync_with_git,
+             destination_branch=model.destination_branch,
+             branch_diff=model.branch_diff,
+         )
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS,
+             parameters={"model": request_refresh_artifact_model},
+             context=context,
          )

      if model.do_repository_checks:
@@ -320,10 +361,6 @@ async def run_generators(
              parameters={"model": model_proposed_change_repo_checks},
          )

-     for next_msg in next_messages:
-         next_msg.assign_meta(parent=model)
-         await service.message_bus.send(message=next_msg)
-

  @flow(
      name="proposed-changed-schema-integrity",
@@ -533,7 +570,9 @@ async def run_proposed_change_user_tests(model: RequestProposedChangeUserTests,
      name="artifacts-generation-validation",
      flow_run_name="Validating generation of artifacts for {model.artifact_definition.definition_name}",
  )
- async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, service: InfrahubServices) -> None:
+ async def validate_artifacts_generation(
+     model: RequestArtifactDefinitionCheck, service: InfrahubServices, context: InfrahubContext
+ ) -> None:
      await add_tags(branches=[model.source_branch], nodes=[model.proposed_change], db_change=True)

      log = get_run_logger()
@@ -566,7 +605,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, s
              "label": validator_name,
              "definition": model.artifact_definition.definition_id,
          },
-         context=model.context,
+         context=context,
      )

      await artifact_definition.targets.fetch()
@@ -599,7 +638,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, s
              log.info(f"Trigger Artifact processing for {member.display_label}")

              check_model = CheckArtifactCreate(
-                 context=model.context,
+                 context=context,
                  artifact_name=model.artifact_definition.artifact_name,
                  artifact_id=artifact_id,
                  artifact_definition=model.artifact_definition.definition_id,
@@ -633,7 +672,7 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, s
          checks=checks,
          validator=validator,
          proposed_change_id=model.proposed_change,
-         context=model.context,
+         context=context,
          service=service,
      )

@@ -653,3 +692,841 @@ def _should_render_artifact(artifact_id: str | None, managed_branch: bool, impac
      # return artifact_id in impacted_artifacts
      # Temporary workaround tracked in https://github.com/opsmill/infrahub/issues/4991
      return True
+
+
+ @flow(
+     name="run-generator-as-check",
+     flow_run_name="Execute Generator {model.generator_definition.definition_name} for {model.target_name}",
+ )
+ async def run_generator_as_check(
+     model: RunGeneratorAsCheckModel, service: InfrahubServices, context: InfrahubContext
+ ) -> ValidatorConclusion:
+     await add_tags(branches=[model.branch_name], nodes=[model.proposed_change], db_change=True)
+
+     log = get_run_logger()
+
+     repository = await get_initialized_repo(
+         repository_id=model.repository_id,
+         name=model.repository_name,
+         service=service,
+         repository_kind=model.repository_kind,
+         commit=model.commit,
+     )
+
+     conclusion = ValidatorConclusion.SUCCESS
+
+     generator_definition = InfrahubGeneratorDefinitionConfig(
+         name=model.generator_definition.definition_name,
+         class_name=model.generator_definition.class_name,
+         file_path=model.generator_definition.file_path,
+         query=model.generator_definition.query_name,
+         targets=model.generator_definition.group_id,
+         convert_query_response=model.generator_definition.convert_query_response,
+     )
+
+     commit_worktree = repository.get_commit_worktree(commit=model.commit)
+
+     file_info = extract_repo_file_information(
+         full_filename=commit_worktree.directory / generator_definition.file_path,
+         repo_directory=repository.directory_root,
+         worktree_directory=commit_worktree.directory,
+     )
+     generator_instance = await _define_instance(model=model, service=service)
+
+     check_message = "Instance successfully generated"
+     try:
+         log.debug(f"repo information {file_info}")
+         log.debug(f"Root directory : {repository.directory_root}")
+         generator_class = generator_definition.load_class(
+             import_root=repository.directory_root, relative_path=file_info.relative_repo_path_dir
+         )
+
+         generator = generator_class(
+             query=generator_definition.query,
+             client=service.client,
+             branch=model.branch_name,
+             params=model.variables,
+             generator_instance=generator_instance.id,
+             convert_query_response=generator_definition.convert_query_response,
+             infrahub_node=InfrahubNode,
+         )
+         generator._init_client.request_context = context.to_request_context()
+         await generator.run(identifier=generator_definition.name)
+         generator_instance.status.value = GeneratorInstanceStatus.READY.value
+     except ModuleImportError as exc:
+         conclusion = ValidatorConclusion.FAILURE
+         generator_instance.status.value = GeneratorInstanceStatus.ERROR.value
+         check_message = f"Failed to import generator: {exc.message}"
+         log.exception(check_message, exc_info=exc)
+     except Exception as exc:
+         conclusion = ValidatorConclusion.FAILURE
+         generator_instance.status.value = GeneratorInstanceStatus.ERROR.value
+         check_message = f"Failed to execute generator: {str(exc)}"
+         log.exception(check_message, exc_info=exc)
+
+     log.info("Generator run completed, starting update")
+     await generator_instance.update(do_full_update=True)
+
+     check = None
+     existing_check = await service.client.filters(
+         kind=InfrahubKind.GENERATORCHECK, validator__ids=model.validator_id, instance__value=generator_instance.id
+     )
+     if existing_check:
+         check = existing_check[0]
+
+     if check:
+         check.created_at.value = Timestamp().to_string()
+         check.conclusion.value = conclusion.value
+         await check.save()
+     else:
+         check = await service.client.create(
+             kind=InfrahubKind.GENERATORCHECK,
+             data={
+                 "name": model.target_name,
+                 "origin": model.repository_id,
+                 "kind": "GeneratorDefinition",
+                 "validator": model.validator_id,
+                 "created_at": Timestamp().to_string(),
+                 "message": check_message,
+                 "conclusion": conclusion.value,
+                 "instance": generator_instance.id,
+             },
+         )
+         await check.save()
+
+     return conclusion
+
+
+ async def _define_instance(model: RunGeneratorAsCheckModel, service: InfrahubServices) -> InfrahubNode:
+     if model.generator_instance:
+         instance = await service.client.get(
+             kind=InfrahubKind.GENERATORINSTANCE, id=model.generator_instance, branch=model.branch_name
+         )
+         instance.status.value = GeneratorInstanceStatus.PENDING.value
+         await instance.update(do_full_update=True)
+
+     else:
+         async with lock.registry.get(
+             f"{model.target_id}-{model.generator_definition.definition_id}", namespace="generator"
+         ):
+             instances = await service.client.filters(
+                 kind=InfrahubKind.GENERATORINSTANCE,
+                 definition__ids=[model.generator_definition.definition_id],
+                 object__ids=[model.target_id],
+                 branch=model.branch_name,
+             )
+             if instances:
+                 instance = instances[0]
+                 instance.status.value = GeneratorInstanceStatus.PENDING.value
+                 await instance.update(do_full_update=True)
+             else:
+                 instance = await service.client.create(
+                     kind=InfrahubKind.GENERATORINSTANCE,
+                     branch=model.branch_name,
+                     data={
+                         "name": f"{model.generator_definition.definition_name}: {model.target_name}",
+                         "status": GeneratorInstanceStatus.PENDING.value,
+                         "object": model.target_id,
+                         "definition": model.generator_definition.definition_id,
+                     },
+                 )
+                 await instance.save()
+     return instance
+
+
+ @flow(
+     name="request-generator-definition-check",
+     flow_run_name="Validate Generator selection for {model.generator_definition.definition_name}",
+ )
+ async def request_generator_definition_check(
+     model: RequestGeneratorDefinitionCheck, service: InfrahubServices, context: InfrahubContext
+ ) -> None:
+     log = get_run_logger()
+     await add_tags(branches=[model.source_branch], nodes=[model.proposed_change])
+
+     proposed_change = await service.client.get(kind=InfrahubKind.PROPOSEDCHANGE, id=model.proposed_change)
+
+     validator_name = f"Generator Validator: {model.generator_definition.definition_name}"
+     await proposed_change.validations.fetch()
+
+     previous_validator: CoreGeneratorValidator | None = None
+     for relationship in proposed_change.validations.peers:
+         existing_validator = relationship.peer
+         if (
+             existing_validator.typename == InfrahubKind.GENERATORVALIDATOR
+             and existing_validator.definition.id == model.generator_definition.definition_id
+         ):
+             previous_validator = existing_validator
+
+     validator = await start_validator(
+         service=service,
+         validator=previous_validator,
+         validator_type=CoreGeneratorValidator,
+         proposed_change=model.proposed_change,
+         data={
+             "label": validator_name,
+             "definition": model.generator_definition.definition_id,
+         },
+         context=context,
+     )
+
+     group = await service.client.get(
+         kind=InfrahubKind.GENERICGROUP,
+         prefetch_relationships=True,
+         populate_store=True,
+         id=model.generator_definition.group_id,
+         branch=model.source_branch,
+     )
+     await group.members.fetch()
+
+     existing_instances = await service.client.filters(
+         kind=InfrahubKind.GENERATORINSTANCE,
+         definition__ids=[model.generator_definition.definition_id],
+         include=["object"],
+         branch=model.source_branch,
+     )
+     instance_by_member = {}
+     for instance in existing_instances:
+         instance_by_member[instance.object.peer.id] = instance.id
+
+     repository = model.branch_diff.get_repository(repository_id=model.generator_definition.repository_id)
+     requested_instances = 0
+     impacted_instances = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.GENERATORINSTANCE)
+
+     check_generator_run_models = []
+     for relationship in group.members.peers:
+         member = relationship.peer
+         generator_instance = instance_by_member.get(member.id)
+         if _run_generator(
+             instance_id=generator_instance,
+             managed_branch=model.source_branch_sync_with_git,
+             impacted_instances=impacted_instances,
+         ):
+             requested_instances += 1
+             log.info(f"Trigger execution of {model.generator_definition.definition_name} for {member.display_label}")
+             check_generator_run_model = RunGeneratorAsCheckModel(
+                 generator_definition=model.generator_definition,
+                 generator_instance=generator_instance,
+                 commit=repository.source_commit,
+                 repository_id=repository.repository_id,
+                 repository_name=repository.repository_name,
+                 repository_kind=repository.kind,
+                 branch_name=model.source_branch,
+                 query=model.generator_definition.query_name,
+                 variables=member.extract(params=model.generator_definition.parameters),
+                 target_id=member.id,
+                 target_name=member.display_label,
+                 validator_id=validator.id,
+                 proposed_change=model.proposed_change,
+             )
+             check_generator_run_models.append(check_generator_run_model)
+
+     checks_coroutines = [
+         service.workflow.execute_workflow(
+             workflow=RUN_GENERATOR_AS_CHECK,
+             parameters={"model": check_generator_run_model},
+             expected_return=ValidatorConclusion,
+             context=context,
+         )
+         for check_generator_run_model in check_generator_run_models
+     ]
+
+     await run_checks_and_update_validator(
+         checks=checks_coroutines,
+         validator=validator,
+         context=context,
+         service=service,
+         proposed_change_id=proposed_change.id,
+     )
+
+
+ def _run_generator(instance_id: str | None, managed_branch: bool, impacted_instances: list[str]) -> bool:
+     """Returns a boolean to indicate if a generator instance needs to be executed
+     Will return true if:
+     * The instance_id wasn't set which could be that it's a new object that doesn't have a previous generator instance
+     * The source branch is set to sync with Git which would indicate that it could contain updates in git to the generator
+     * The instance_id exists in the impacted_instances list
+     Will return false if:
+     * The source branch is a not one that syncs with git and the instance_id exists and is not in the impacted list
+     """
+     if not instance_id or managed_branch:
+         return True
+     return instance_id in impacted_instances
+
+
+ class DefinitionSelect(IntFlag):
+     NONE = 0
+     MODIFIED_KINDS = 1
+     FILE_CHANGES = 2
+
+     @staticmethod
+     def add_flag(current: DefinitionSelect, flag: DefinitionSelect, condition: bool) -> DefinitionSelect:
+         if condition:
+             return current | flag
+         return current
+
+     @property
+     def log_line(self) -> str:
+         change_types = []
+         if DefinitionSelect.MODIFIED_KINDS in self:
+             change_types.append("data changes within relevant object kinds")
+
+         if DefinitionSelect.FILE_CHANGES in self:
+             change_types.append("file modifications in Git repositories")
+
+         if self:
+             return f"Requesting generation due to {' and '.join(change_types)}"
+
+         return "Doesn't require changes due to no relevant modified kinds or file changes in Git"
+
+
+ @flow(name="proposed-changed-pipeline", flow_run_name="Execute proposed changed pipeline")
+ async def run_proposed_change_pipeline(
+     model: RequestProposedChangePipeline, service: InfrahubServices, context: InfrahubContext
+ ) -> None:
+     repositories = await _get_proposed_change_repositories(model=model, service=service)
+
+     if model.source_branch_sync_with_git and await _validate_repository_merge_conflicts(
+         repositories=repositories, service=service
+     ):
+         for repo in repositories:
+             if not repo.read_only and repo.internal_status == RepositoryInternalStatus.ACTIVE.value:
+                 trigger_repo_checks_model = TriggerRepositoryInternalChecks(
+                     proposed_change=model.proposed_change,
+                     repository=repo.repository_id,
+                     source_branch=repo.source_branch,
+                     target_branch=repo.destination_branch,
+                 )
+                 await service.workflow.submit_workflow(
+                     workflow=GIT_REPOSITORY_INTERNAL_CHECKS_TRIGGER,
+                     context=context,
+                     parameters={"model": trigger_repo_checks_model},
+                 )
+         return
+
+     await _gather_repository_repository_diffs(repositories=repositories, service=service)
+
+     async with service.database.start_transaction() as dbt:
+         destination_branch = await registry.get_branch(db=dbt, branch=model.destination_branch)
+         source_branch = await registry.get_branch(db=dbt, branch=model.source_branch)
+         component_registry = get_component_registry()
+         diff_coordinator = await component_registry.get_component(DiffCoordinator, db=dbt, branch=source_branch)
+         await diff_coordinator.update_branch_diff(base_branch=destination_branch, diff_branch=source_branch)
+
+     diff_summary = await service.client.get_diff_summary(branch=model.source_branch)
+     await set_diff_summary_cache(pipeline_id=model.pipeline_id, diff_summary=diff_summary, cache=service.cache)
+     branch_diff = ProposedChangeBranchDiff(pipeline_id=model.pipeline_id, repositories=repositories)
+     await _populate_subscribers(
+         branch_diff=branch_diff, diff_summary=diff_summary, service=service, branch=model.source_branch
+     )
+
+     if model.check_type is CheckType.ARTIFACT:
+         request_refresh_artifact_model = RequestProposedChangeRefreshArtifacts(
+             proposed_change=model.proposed_change,
+             source_branch=model.source_branch,
+             source_branch_sync_with_git=model.source_branch_sync_with_git,
+             destination_branch=model.destination_branch,
+             branch_diff=branch_diff,
+         )
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS,
+             parameters={"model": request_refresh_artifact_model},
+             context=context,
+         )
+
+     if model.check_type in [CheckType.ALL, CheckType.GENERATOR]:
+         model_proposed_change_run_generator = RequestProposedChangeRunGenerators(
+             proposed_change=model.proposed_change,
+             source_branch=model.source_branch,
+             source_branch_sync_with_git=model.source_branch_sync_with_git,
+             destination_branch=model.destination_branch,
+             branch_diff=branch_diff,
+             refresh_artifacts=model.check_type is CheckType.ALL,
+             do_repository_checks=model.check_type is CheckType.ALL,
+         )
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_RUN_GENERATORS,
+             context=context,
+             parameters={"model": model_proposed_change_run_generator},
+         )
+
+     if model.check_type in [CheckType.ALL, CheckType.DATA] and has_node_changes(
+         diff_summary=diff_summary, branch=model.source_branch
+     ):
+         model_proposed_change_data_integrity = RequestProposedChangeDataIntegrity(
+             proposed_change=model.proposed_change,
+             source_branch=model.source_branch,
+             source_branch_sync_with_git=model.source_branch_sync_with_git,
+             destination_branch=model.destination_branch,
+             branch_diff=branch_diff,
+         )
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_DATA_INTEGRITY,
+             context=context,
+             parameters={"model": model_proposed_change_data_integrity},
+         )
+
+     if model.check_type in [CheckType.REPOSITORY, CheckType.USER]:
+         model_proposed_change_repo_checks = RequestProposedChangeRepositoryChecks(
+             proposed_change=model.proposed_change,
+             source_branch=model.source_branch,
+             source_branch_sync_with_git=model.source_branch_sync_with_git,
+             destination_branch=model.destination_branch,
+             branch_diff=branch_diff,
+         )
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_REPOSITORY_CHECKS,
+             context=context,
+             parameters={"model": model_proposed_change_repo_checks},
+         )
+
+     if model.check_type in [CheckType.ALL, CheckType.SCHEMA] and has_data_changes(
+         diff_summary=diff_summary, branch=model.source_branch
+     ):
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,
+             context=context,
+             parameters={
+                 "model": RequestProposedChangeSchemaIntegrity(
+                     proposed_change=model.proposed_change,
+                     source_branch=model.source_branch,
+                     source_branch_sync_with_git=model.source_branch_sync_with_git,
+                     destination_branch=model.destination_branch,
+                     branch_diff=branch_diff,
+                 )
+             },
+         )
+
+     if model.check_type in [CheckType.ALL, CheckType.TEST]:
+         await service.workflow.submit_workflow(
+             workflow=REQUEST_PROPOSED_CHANGE_USER_TESTS,
+             context=context,
+             parameters={
+                 "model": RequestProposedChangeUserTests(
+                     proposed_change=model.proposed_change,
+                     source_branch=model.source_branch,
+                     source_branch_sync_with_git=model.source_branch_sync_with_git,
+                     destination_branch=model.destination_branch,
+                     branch_diff=branch_diff,
+                 )
+             },
+         )
+
+
+ @flow(
+     name="proposed-changed-refresh-artifacts",
+     flow_run_name="Trigger artifacts refresh",
+ )
+ async def refresh_artifacts(
+     model: RequestProposedChangeRefreshArtifacts, service: InfrahubServices, context: InfrahubContext
+ ) -> None:
+     await add_tags(branches=[model.source_branch], nodes=[model.proposed_change])
+     log = get_run_logger()
+
+     definition_information = await service.client.execute_graphql(
+         query=GATHER_ARTIFACT_DEFINITIONS,
+         branch_name=model.source_branch,
+     )
+     artifact_definitions = _parse_artifact_definitions(
+         definitions=definition_information[InfrahubKind.ARTIFACTDEFINITION]["edges"]
+     )
+     diff_summary = await get_diff_summary_cache(pipeline_id=model.branch_diff.pipeline_id, cache=service.cache)
+     modified_kinds = get_modified_kinds(diff_summary=diff_summary, branch=model.source_branch)
+
+     for artifact_definition in artifact_definitions:
+         # Request artifact definition checks if the source branch that is managed in combination
+         # to the Git repository containing modifications which could indicate changes to the transforms
+         # in code
+         # Alternatively if the queries used touches models that have been modified in the path
+         # impacted artifact definitions will be included for consideration
+
+         select = DefinitionSelect.NONE
+         select = select.add_flag(
+             current=select,
+             flag=DefinitionSelect.FILE_CHANGES,
+             condition=model.source_branch_sync_with_git and model.branch_diff.has_file_modifications,
+         )
+
+         for changed_model in modified_kinds:
+             condition = False
+             if (changed_model in artifact_definition.query_models) or (
+                 changed_model.startswith("Profile")
+                 and changed_model.replace("Profile", "", 1) in artifact_definition.query_models
+             ):
+                 condition = True
+
+             select = select.add_flag(
+                 current=select,
+                 flag=DefinitionSelect.MODIFIED_KINDS,
+                 condition=condition,
+             )
+
+         if select:
+             log.info(f"Trigger processing of {artifact_definition.definition_name}")
+             request_artifacts_definitions_model = RequestArtifactDefinitionCheck(
+                 artifact_definition=artifact_definition,
+                 branch_diff=model.branch_diff,
+                 proposed_change=model.proposed_change,
+                 source_branch=model.source_branch,
+                 source_branch_sync_with_git=model.source_branch_sync_with_git,
+                 destination_branch=model.destination_branch,
+             )
+
+             await service.workflow.submit_workflow(
+                 REQUEST_ARTIFACT_DEFINITION_CHECK,
+                 parameters={"model": request_artifacts_definitions_model},
+                 context=context,
+             )
+
+
+ GATHER_ARTIFACT_DEFINITIONS = """
+ query GatherArtifactDefinitions {
+     CoreArtifactDefinition {
+         edges {
+             node {
+                 id
+                 name {
+                     value
+                 }
+                 artifact_name {
+                     value
+                 }
+                 content_type {
+                     value
+                 }
+                 transformation {
+                     node {
+                         __typename
+                         timeout {
+                             value
+                         }
+                         query {
+                             node {
+                                 models {
+                                     value
+                                 }
+                                 name {
+                                     value
+                                 }
+                             }
+                         }
+                         ... on CoreTransformJinja2 {
+                             template_path {
+                                 value
+                             }
+                         }
+                         ... on CoreTransformPython {
+                             class_name {
+                                 value
+                             }
+                             file_path {
+                                 value
+                             }
+                             convert_query_response {
+                                 value
+                             }
+                         }
+                         repository {
+                             node {
+                                 id
+                             }
+                         }
+                     }
+                 }
+             }
+         }
+     }
+ }
+ """
+
+ GATHER_GRAPHQL_QUERY_SUBSCRIBERS = """
+ query GatherGraphQLQuerySubscribers($members: [ID!]) {
+     CoreGraphQLQueryGroup(members__ids: $members) {
+         edges {
+             node {
+                 subscribers {
+                     edges {
+                         node {
+                             id
+                             __typename
+                         }
+                     }
+                 }
+             }
+         }
+     }
+ }
+ """
+
+
+ DESTINATION_ALLREPOSITORIES = """
+ query DestinationBranchRepositories {
+     CoreGenericRepository {
+         edges {
+             node {
+                 __typename
+                 id
+                 name {
+                     value
+                 }
+                 internal_status {
+                     value
+                 }
+                 ... on CoreRepository {
+                     commit {
+                         value
+                     }
+                 }
+                 ... on CoreReadOnlyRepository {
+                     commit {
+                         value
+                     }
+                 }
+             }
+         }
+     }
+ }
+ """
+
+ SOURCE_REPOSITORIES = """
+ query MyQuery {
+     CoreRepository {
+         edges {
+             node {
+                 __typename
+                 id
+                 name {
+                     value
+                 }
+                 internal_status {
+                     value
+                 }
+                 commit {
+                     value
+                 }
+             }
+         }
+     }
+ }
+ """
+ SOURCE_READONLY_REPOSITORIES = """
+ query MyQuery {
+     CoreReadOnlyRepository {
+         edges {
+             node {
+                 __typename
+                 id
+                 name {
+                     value
+                 }
+                 internal_status {
+                     value
+                 }
+                 commit {
+                     value
+                 }
+             }
+         }
+     }
+ }
+ """
+
+
+ class Repository(BaseModel):
+     repository_id: str
+     repository_name: str
+     read_only: bool
+     commit: str
+     internal_status: str
+
+
+ def _parse_proposed_change_repositories(
+     model: RequestProposedChangePipeline, source: list[dict], destination: list[dict]
+ ) -> list[ProposedChangeRepository]:
+     """This function assumes that the repos is a list of the edges
+
+     The data should come from the queries:
+     * DESTINATION_ALLREPOSITORIES
+     * SOURCE_REPOSITORIES
+     * SOURCE_READONLY_REPOSITORIES
+     """
+     destination_repos = _parse_repositories(repositories=destination)
+     source_repos = _parse_repositories(repositories=source)
+     pc_repos: dict[str, ProposedChangeRepository] = {}
+     for repo in destination_repos:
+         if repo.repository_id not in pc_repos:
+             pc_repos[repo.repository_id] = ProposedChangeRepository(
+                 repository_id=repo.repository_id,
+                 repository_name=repo.repository_name,
+                 read_only=repo.read_only,
+                 internal_status=repo.internal_status,
+                 destination_commit=repo.commit,
+                 source_branch=model.source_branch,
+                 destination_branch=model.destination_branch,
+             )
+         else:
+             pc_repos[repo.repository_id].destination_commit = repo.commit
+
+     for repo in source_repos:
+         if repo.repository_id not in pc_repos:
+             pc_repos[repo.repository_id] = ProposedChangeRepository(
+                 repository_id=repo.repository_id,
+                 repository_name=repo.repository_name,
+                 read_only=repo.read_only,
+                 internal_status=repo.internal_status,
+                 source_commit=repo.commit,
+                 source_branch=model.source_branch,
+                 destination_branch=model.destination_branch,
+             )
+         else:
+             pc_repos[repo.repository_id].source_commit = repo.commit
+             pc_repos[repo.repository_id].internal_status = repo.internal_status
+
+     return list(pc_repos.values())
+
+
+ def _parse_repositories(repositories: list[dict]) -> list[Repository]:
+     """This function assumes that the repos is a list of the edges
+
+     The data should come from the queries:
+     * DESTINATION_ALLREPOSITORIES
+     * SOURCE_REPOSITORIES
+     * SOURCE_READONLY_REPOSITORIES
+     """
+     parsed = []
+     for repo in repositories:
+         parsed.append(
+             Repository(
+                 repository_id=repo["node"]["id"],
+                 repository_name=repo["node"]["name"]["value"],
+                 read_only=repo["node"]["__typename"] == InfrahubKind.READONLYREPOSITORY,
+                 commit=repo["node"]["commit"]["value"] or "",
+                 internal_status=repo["node"]["internal_status"]["value"],
+             )
+         )
+     return parsed
+
+
+ def _parse_artifact_definitions(definitions: list[dict]) -> list[ProposedChangeArtifactDefinition]:
+     """This function assumes that definitions is a list of the edges
+
+     The edge should be of type CoreArtifactDefinition from the query
+     * GATHER_ARTIFACT_DEFINITIONS
+     """
+
+     parsed = []
+     for definition in definitions:
+         artifact_definition = ProposedChangeArtifactDefinition(
+             definition_id=definition["node"]["id"],
+             definition_name=definition["node"]["name"]["value"],
+             artifact_name=definition["node"]["artifact_name"]["value"],
+             content_type=definition["node"]["content_type"]["value"],
+             timeout=definition["node"]["transformation"]["node"]["timeout"]["value"],
+             query_name=definition["node"]["transformation"]["node"]["query"]["node"]["name"]["value"],
+             query_models=definition["node"]["transformation"]["node"]["query"]["node"]["models"]["value"] or [],
+             repository_id=definition["node"]["transformation"]["node"]["repository"]["node"]["id"],
+             transform_kind=definition["node"]["transformation"]["node"]["__typename"],
+         )
+         if artifact_definition.transform_kind == InfrahubKind.TRANSFORMJINJA2:
+             artifact_definition.template_path = definition["node"]["transformation"]["node"]["template_path"]["value"]
+         elif artifact_definition.transform_kind == InfrahubKind.TRANSFORMPYTHON:
+             artifact_definition.class_name = definition["node"]["transformation"]["node"]["class_name"]["value"]
+             artifact_definition.file_path = definition["node"]["transformation"]["node"]["file_path"]["value"]
+             artifact_definition.convert_query_response = definition["node"]["transformation"]["node"][
+                 "convert_query_response"
+             ]["value"]
+
+         parsed.append(artifact_definition)
+
+     return parsed
+
+
+ async def _get_proposed_change_repositories(
+     model: RequestProposedChangePipeline, service: InfrahubServices
+ ) -> list[ProposedChangeRepository]:
+     destination_all = await service.client.execute_graphql(
+         query=DESTINATION_ALLREPOSITORIES, branch_name=model.destination_branch
+     )
+     source_managed = await service.client.execute_graphql(query=SOURCE_REPOSITORIES, branch_name=model.source_branch)
+     source_readonly = await service.client.execute_graphql(
+         query=SOURCE_READONLY_REPOSITORIES, branch_name=model.source_branch
+     )
+
+     destination_all = destination_all[InfrahubKind.GENERICREPOSITORY]["edges"]
+     source_all = (
+         source_managed[InfrahubKind.REPOSITORY]["edges"] + source_readonly[InfrahubKind.READONLYREPOSITORY]["edges"]
+     )
+
+     return _parse_proposed_change_repositories(model=model, source=source_all, destination=destination_all)
+
+
+ @task(name="proposed-change-validate-repository-conflicts", task_run_name="Validate conflicts on repository") # type: ignore[arg-type]
+ async def _validate_repository_merge_conflicts(
+     repositories: list[ProposedChangeRepository], service: InfrahubServices
+ ) -> bool:
+     log = get_run_logger()
+     conflicts = False
+     for repo in repositories:
+         if repo.has_diff and not repo.is_staging:
+             git_repo = await InfrahubRepository.init(
+                 id=repo.repository_id,
+                 name=repo.repository_name,
+                 client=service.client,
+                 service=service,
+             )
+             async with lock.registry.get(name=repo.repository_name, namespace="repository"):
+                 repo.conflicts = await git_repo.get_conflicts(
+                     source_branch=repo.source_branch, dest_branch=repo.destination_branch
+                 )
+             if repo.conflicts:
+                 log.info(f"{len(repo.conflicts)} conflict(s) identified on {repo.repository_name}")
+                 conflicts = True
+             else:
+                 log.info(f"no conflict identified for {repo.repository_name}")
+
+     return conflicts
+
+
+ async def _gather_repository_repository_diffs(
+     repositories: list[ProposedChangeRepository], service: InfrahubServices
+ ) -> None:
+     for repo in repositories:
+         if repo.has_diff and repo.source_commit and repo.destination_commit:
+             # TODO we need to find a way to return all files in the repo if the repo is new
+             git_repo = await InfrahubRepository.init(
+                 id=repo.repository_id,
+                 name=repo.repository_name,
+                 client=service.client,
+                 service=service,
+             )
+
+             files_changed: list[str] = []
+             files_added: list[str] = []
+             files_removed: list[str] = []
+
+             if repo.destination_branch:
+                 files_changed, files_added, files_removed = await git_repo.calculate_diff_between_commits(
+                     first_commit=repo.source_commit, second_commit=repo.destination_commit
+                 )
+             else:
+                 files_added = await git_repo.list_all_files(commit=repo.source_commit)
+
+             repo.files_removed = files_removed
+             repo.files_added = files_added
+             repo.files_changed = files_changed
+
+
+ async def _populate_subscribers(
+     branch_diff: ProposedChangeBranchDiff, diff_summary: list[NodeDiff], service: InfrahubServices, branch: str
+ ) -> None:
+     result = await service.client.execute_graphql(
+         query=GATHER_GRAPHQL_QUERY_SUBSCRIBERS,
+         branch_name=branch,
+         variables={"members": get_modified_node_ids(diff_summary=diff_summary, branch=branch)},
+     )
+
+     for group in result[InfrahubKind.GRAPHQLQUERYGROUP]["edges"]:
+         for subscriber in group["node"]["subscribers"]["edges"]:
+             branch_diff.subscribers.append(
+                 ProposedChangeSubscriber(subscriber_id=subscriber["node"]["id"], kind=subscriber["node"]["__typename"])
+             )