infrahub-server 1.5.0b0-py3-none-any.whl → 1.5.0b2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/oauth2.py +13 -19
- infrahub/api/oidc.py +15 -21
- infrahub/api/schema.py +24 -3
- infrahub/artifacts/models.py +2 -1
- infrahub/auth.py +137 -3
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +103 -98
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +30 -3
- infrahub/computed_attribute/tasks.py +20 -8
- infrahub/core/attribute.py +13 -5
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +7 -3
- infrahub/core/branch/tasks.py +70 -8
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +3 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +5 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +12 -11
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
- infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
- infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +35 -4
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +52 -19
- infrahub/core/node/__init__.py +158 -51
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +46 -63
- infrahub/core/node/lock_utils.py +70 -44
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/attribute.py +55 -0
- infrahub/core/query/ipam.py +1 -0
- infrahub/core/query/node.py +23 -4
- infrahub/core/query/relationship.py +1 -0
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -0
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +16 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +22 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +300 -8
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +34 -22
- infrahub/git/base.py +4 -1
- infrahub/git/integrator.py +23 -15
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +22 -5
- infrahub/git/tasks.py +66 -10
- infrahub/git/utils.py +123 -1
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/endpoints.py +14 -4
- infrahub/graphql/manager.py +4 -9
- infrahub/graphql/mutations/convert_object_type.py +11 -1
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +54 -35
- infrahub/graphql/mutations/main.py +27 -28
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -16
- infrahub/message_bus/types.py +2 -1
- infrahub/middleware.py +26 -1
- infrahub/permissions/constants.py +2 -0
- infrahub/proposed_change/tasks.py +35 -17
- infrahub/server.py +21 -4
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/workers/dependencies.py +10 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +80 -0
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +13 -10
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +28 -9
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +53 -44
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
- infrahub_testcontainers/container.py +115 -3
- infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
- infrahub_testcontainers/docker-compose.test.yml +6 -1
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/generators/tasks.py
CHANGED
@@ -14,6 +14,7 @@ from prefect.states import Completed, Failed
 from infrahub import lock
 from infrahub.context import InfrahubContext  # noqa: TC001 needed for prefect flow
 from infrahub.core.constants import GeneratorInstanceStatus, InfrahubKind
+from infrahub.generators.constants import GeneratorDefinitionRunSource
 from infrahub.generators.models import (
     GeneratorDefinitionModel,
     ProposedChangeGeneratorDefinition,
@@ -22,6 +23,7 @@ from infrahub.generators.models import (
 )
 from infrahub.git.base import extract_repo_file_information
 from infrahub.git.repository import get_initialized_repo
+from infrahub.git.utils import fetch_proposed_change_generator_definition_targets
 from infrahub.workers.dependencies import get_client, get_workflow
 from infrahub.workflows.catalogue import REQUEST_GENERATOR_DEFINITION_RUN, REQUEST_GENERATOR_RUN
 from infrahub.workflows.utils import add_tags
@@ -57,6 +59,8 @@ async def run_generator(model: RequestGeneratorRun) -> None:
         query=model.generator_definition.query_name,
         targets=model.generator_definition.group_id,
         convert_query_response=model.generator_definition.convert_query_response,
+        execute_in_proposed_change=model.generator_definition.execute_in_proposed_change,
+        execute_after_merge=model.generator_definition.execute_after_merge,
     )

     commit_worktree = repository.get_commit_worktree(commit=model.commit)
@@ -80,6 +84,8 @@ async def run_generator(model: RequestGeneratorRun) -> None:
         params=model.variables,
         generator_instance=generator_instance.id,
         convert_query_response=generator_definition.convert_query_response,
+        execute_in_proposed_change=generator_definition.execute_in_proposed_change,
+        execute_after_merge=generator_definition.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     await generator.run(identifier=generator_definition.name)
@@ -129,28 +135,39 @@ async def _define_instance(model: RequestGeneratorRun, client: InfrahubClient) -


 @flow(name="generator-definition-run", flow_run_name="Run all generators")
-async def run_generator_definition(
+async def run_generator_definition(
+    branch: str, context: InfrahubContext, source: GeneratorDefinitionRunSource = GeneratorDefinitionRunSource.UNKNOWN
+) -> None:
     await add_tags(branches=[branch])

     generators = await get_client().filters(
         kind=InfrahubKind.GENERATORDEFINITION, prefetch_relationships=True, populate_store=True, branch=branch
     )

-    generator_definitions = [
+    generator_definitions: list[ProposedChangeGeneratorDefinition] = []
+
+    for generator in generators:
+        if (
+            source == GeneratorDefinitionRunSource.PROPOSED_CHANGE and not generator.execute_in_proposed_change.value
+        ) or (source == GeneratorDefinitionRunSource.MERGE and not generator.execute_after_merge.value):
+            continue
+
+        generator_definitions.append(
+            ProposedChangeGeneratorDefinition(
+                definition_id=generator.id,
+                definition_name=generator.name.value,
+                class_name=generator.class_name.value,
+                file_path=generator.file_path.value,
+                query_name=generator.query.peer.name.value,
+                query_models=generator.query.peer.models.value,
+                repository_id=generator.repository.peer.id,
+                parameters=generator.parameters.value,
+                group_id=generator.targets.peer.id,
+                convert_query_response=generator.convert_query_response.value,
+                execute_in_proposed_change=generator.execute_in_proposed_change.value,
+                execute_after_merge=generator.execute_after_merge.value,
+            )
         )
-        for generator in generators
-    ]

     for generator_definition in generator_definitions:
         model = RequestGeneratorDefinitionRun(branch=branch, generator_definition=generator_definition)
@@ -179,14 +196,9 @@ async def request_generator_definition_run(
         branch=model.branch,
     )

-    group = await
-        prefetch_relationships=True,
-        populate_store=True,
-        id=model.generator_definition.group_id,
-        branch=model.branch,
+    group = await fetch_proposed_change_generator_definition_targets(
+        client=client, branch=model.branch, definition=model.generator_definition
     )
-    await group.members.fetch()

     instance_by_member = {}
     for instance in existing_instances:
infrahub/git/base.py
CHANGED
@@ -941,7 +941,10 @@ class InfrahubRepositoryBase(BaseModel, ABC):
     def _raise_enriched_error_static(
         error: GitCommandError, name: str, location: str, branch_name: str | None = None
     ) -> NoReturn:
-        if
+        if any(
+            err in error.stderr
+            for err in ("Repository not found", "does not appear to be a git", "Failed to connect to")
+        ):
             raise RepositoryConnectionError(identifier=name) from error

         if "error: pathspec" in error.stderr:
infrahub/git/integrator.py
CHANGED
@@ -226,7 +226,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             )
         )

-    @task(name="import-jinja2-
+    @task(name="import-jinja2-transforms", task_run_name="Import Jinja2 transform", cache_policy=NONE)
     async def import_jinja2_transforms(
         self,
         branch_name: str,
@@ -331,7 +331,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):

         await existing_transform.save()

-    @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)
+    @task(name="import-artifact-definitions", task_run_name="Import Artifact Definitions", cache_policy=NONE)
     async def import_artifact_definitions(
         self,
         branch_name: str,
@@ -432,7 +432,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):

         await existing_artifact_definition.save()

-    @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)
+    @task(name="repository-get-config", task_run_name="get repository config", cache_policy=NONE)
     async def get_repository_config(self, branch_name: str, commit: str) -> InfrahubRepositoryConfig | None:
         branch_wt = self.get_worktree(identifier=commit or branch_name)
         log = get_run_logger()
@@ -469,7 +469,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             log.error(f"Unable to load the configuration file {config_file_name}, the format is not valid : {exc}")
             return None

-    @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)
+    @task(name="import-schema-files", task_run_name="Import schema files", cache_policy=NONE)
     async def import_schema_files(self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig) -> None:
         log = get_run_logger()
         branch_wt = self.get_worktree(identifier=commit or branch_name)
@@ -541,7 +541,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         for schema_file in schemas_data:
             log.info(f"schema '{schema_file.identifier}' loaded successfully!")

-    @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)
+    @task(name="import-graphql-queries", task_run_name="Import GraphQL Queries", cache_policy=NONE)
     async def import_all_graphql_query(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -599,7 +599,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             await obj.save()
         return obj

-    @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)
+    @task(name="import-python-check-definitions", task_run_name="Import Python Check Definitions", cache_policy=NONE)
     async def import_python_check_definitions(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -670,7 +670,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             log.info(f"CheckDefinition '{check_name!r}' not found locally, deleting")
             await check_definition_in_graph[check_name].delete()

-    @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)
+    @task(name="import-generator-definitions", task_run_name="Import Generator Definitions", cache_policy=NONE)
     async def import_generator_definitions(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -756,11 +756,13 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             or existing_generator.parameters.value != generator.parameters
             or existing_generator.convert_query_response.value != generator.convert_query_response
             or existing_generator.targets.id != generator.targets
+            or existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change
+            or existing_generator.execute_after_merge.value != generator.execute_after_merge
         ):
             return True
         return False

-    @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)
+    @task(name="import-python-transforms", task_run_name="Import Python Transforms", cache_policy=NONE)
     async def import_python_transforms(
         self, branch_name: str, commit: str, config_file: InfrahubRepositoryConfig
     ) -> None:
@@ -885,7 +887,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             file_type=file_type,
         )

-    @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)
+    @task(name="import-objects", task_run_name="Import Objects", cache_policy=NONE)
     async def import_objects(
         self,
         branch_name: str,
@@ -905,7 +907,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             object_type=RepositoryObjects.MENU,
         )

-    @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)
+    @task(name="check-definition-get", task_run_name="Get Check Definition", cache_policy=NONE)
     async def get_check_definition(
         self,
         branch_name: str,
@@ -945,7 +947,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
             raise
         return checks

-    @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)
+    @task(name="python-transform-get", task_run_name="Get Python Transform", cache_policy=NONE)
     async def get_python_transforms(
         self, branch_name: str, module: types.ModuleType, file_path: str, transform: InfrahubPythonTransformConfig
     ) -> list[TransformPythonInformation]:
@@ -1023,6 +1025,12 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         if existing_generator.targets.id != generator.targets:
             existing_generator.targets = {"id": generator.targets, "source": str(self.id), "is_protected": True}

+        if existing_generator.execute_in_proposed_change.value != generator.execute_in_proposed_change:
+            existing_generator.execute_in_proposed_change.value = generator.execute_in_proposed_change
+
+        if existing_generator.execute_after_merge.value != generator.execute_after_merge:
+            existing_generator.execute_after_merge.value = generator.execute_after_merge
+
         await existing_generator.save()

     async def create_python_check_definition(
@@ -1152,7 +1160,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         await self.import_python_transforms(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]
         await self.import_generator_definitions(branch_name=branch_name, commit=commit, config_file=config_file)  # type: ignore[misc]

-    @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)
+    @task(name="jinja2-template-render", task_run_name="Render Jinja2 template", cache_policy=NONE)
     async def render_jinja2_template(self, commit: str, location: str, data: dict) -> str:
         log = get_run_logger()
         commit_worktree = self.get_commit_worktree(commit=commit)
@@ -1168,7 +1176,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                 repository_name=self.name, commit=commit, location=location, message=exc.message
             ) from exc

-    @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)
+    @task(name="python-check-execute", task_run_name="Execute Python Check", cache_policy=NONE)
     async def execute_python_check(
         self,
         branch_name: str,
@@ -1227,7 +1235,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
                 repository_name=self.name, class_name=class_name, commit=commit, location=location, message=str(exc)
             ) from exc

-    @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)
+    @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)
     async def execute_python_transform(
         self,
         branch_name: str,
@@ -1363,7 +1371,7 @@ class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
         message: CheckArtifactCreate | RequestArtifactGenerate,
     ) -> ArtifactGenerateResult:
         response = await self.sdk.query_gql_query(
-            name=message.
+            name=message.query_id,
             variables=message.variables,
             update_group=True,
             subscribers=[artifact.id],
infrahub/git/models.py
CHANGED
@@ -38,7 +38,8 @@ class RequestArtifactGenerate(BaseModel):
     target_kind: str = Field(..., description="The kind of the target object for this artifact")
     target_name: str = Field(..., description="Name of the artifact target")
     artifact_id: str | None = Field(default=None, description="The id of the artifact if it previously existed")
-    query: str = Field(..., description="The name of the query to use when collecting data")
+    query: str = Field(..., description="The name of the query to use when collecting data")  # Deprecated
+    query_id: str = Field(..., description="The id of the query to use when collecting data")
     timeout: int = Field(..., description="Timeout for requests used to generate this artifact")
     variables: dict = Field(..., description="Input variables when generating the artifact")
     context: InfrahubContext = Field(..., description="The context of the task")
infrahub/git/repository.py
CHANGED
@@ -2,6 +2,9 @@ from __future__ import annotations

 from typing import TYPE_CHECKING, Any

+from cachetools import TTLCache
+from cachetools.keys import hashkey
+from cachetools_async import cached
 from git.exc import BadName, GitCommandError
 from infrahub_sdk.exceptions import GraphQLError
 from prefect import task
@@ -248,12 +251,13 @@ class InfrahubReadOnlyRepository(InfrahubRepositoryIntegrator):
         await self.update_commit_value(branch_name=self.infrahub_branch_name, commit=commit)


-@
+@cached(
+    TTLCache(maxsize=100, ttl=30),
+    key=lambda *_, **kwargs: hashkey(
+        kwargs.get("repository_id"), kwargs.get("name"), kwargs.get("repository_kind"), kwargs.get("commit")
+    ),
 )
-async def
+async def _get_initialized_repo(
     client: InfrahubClient, repository_id: str, name: str, repository_kind: str, commit: str | None = None
 ) -> InfrahubReadOnlyRepository | InfrahubRepository:
     if repository_kind == InfrahubKind.REPOSITORY:
@@ -263,3 +267,16 @@ async def get_initialized_repo(
         return await InfrahubReadOnlyRepository.init(id=repository_id, name=name, commit=commit, client=client)

     raise NotImplementedError(f"The repository kind {repository_kind} has not been implemented")
+
+
+@task(
+    name="Fetch repository commit",
+    description="Retrieve a git repository at a given commit, if it does not already exist locally",
+    cache_policy=NONE,
+)
+async def get_initialized_repo(
+    client: InfrahubClient, repository_id: str, name: str, repository_kind: str, commit: str | None = None
+) -> InfrahubReadOnlyRepository | InfrahubRepository:
+    return await _get_initialized_repo(
+        client=client, repository_id=repository_id, name=name, repository_kind=repository_kind, commit=commit
+    )
infrahub/git/tasks.py
CHANGED
@@ -1,3 +1,5 @@
+from typing import Any
+
 from infrahub_sdk import InfrahubClient
 from infrahub_sdk.protocols import (
     CoreArtifact,
@@ -14,7 +16,12 @@ from prefect.logging import get_run_logger

 from infrahub import lock
 from infrahub.context import InfrahubContext
-from infrahub.core.constants import
+from infrahub.core.constants import (
+    InfrahubKind,
+    RepositoryInternalStatus,
+    RepositoryOperationalStatus,
+    ValidatorConclusion,
+)
 from infrahub.core.manager import NodeManager
 from infrahub.core.registry import registry
 from infrahub.exceptions import CheckError, RepositoryError
@@ -53,6 +60,7 @@ from .models import (
     UserCheckDefinitionData,
 )
 from .repository import InfrahubReadOnlyRepository, InfrahubRepository, get_initialized_repo
+from .utils import fetch_artifact_definition_targets, fetch_check_definition_targets


 @flow(
@@ -151,6 +159,39 @@ async def create_branch(branch: str, branch_id: str) -> None:
         pass


+@flow(name="sync-git-repo-with-origin", flow_run_name="Sync git repo with origin")
+async def sync_git_repo_with_origin_and_tag_on_failure(
+    client: InfrahubClient,
+    repository_id: str,
+    repository_name: str,
+    repository_location: str,
+    internal_status: str,
+    default_branch_name: str,
+    operational_status: str,
+    staging_branch: str | None = None,
+    infrahub_branch: str | None = None,
+) -> None:
+    repo = await InfrahubRepository.init(
+        id=repository_id,
+        name=repository_name,
+        location=repository_location,
+        client=client,
+        internal_status=internal_status,
+        default_branch_name=default_branch_name,
+    )
+
+    try:
+        await repo.sync(staging_branch=staging_branch)
+    except RepositoryError:
+        if operational_status == RepositoryOperationalStatus.ONLINE.value:
+            params: dict[str, Any] = {
+                "branches": [infrahub_branch] if infrahub_branch else [],
+                "nodes": [str(repository_id)],
+            }
+            await add_tags(**params)
+        raise
+
+
 @flow(name="git_repositories_sync", flow_run_name="Sync Git Repositories")
 async def sync_remote_repositories() -> None:
     log = get_run_logger()
@@ -203,7 +244,17 @@ async def sync_remote_repositories() -> None:
                 continue

             try:
-                await
+                await sync_git_repo_with_origin_and_tag_on_failure(
+                    client=client,
+                    repository_id=repository_data.repository.id,
+                    repository_name=repository_data.repository.name.value,
+                    repository_location=repository_data.repository.location.value,
+                    internal_status=active_internal_status,
+                    default_branch_name=repository_data.repository.default_branch.value,
+                    operational_status=repository_data.repository.operational_status.value,
+                    staging_branch=staging_branch,
+                    infrahub_branch=infrahub_branch,
+                )
                 # Tell workers to fetch to stay in sync
                 message = messages.RefreshGitFetch(
                     meta=Meta(initiator_id=WORKER_IDENTITY, request_id=get_log_data().get("request_id", "")),
@@ -323,9 +374,8 @@ async def generate_request_artifact_definition(
         kind=CoreArtifactDefinition, id=model.artifact_definition_id, branch=model.branch
     )

-    await artifact_definition
-    await group.members.fetch()
+    group = await fetch_artifact_definition_targets(client=client, branch=model.branch, definition=artifact_definition)
+
     current_members = [member.id for member in group.members.peers]

     artifacts_by_member = {}
@@ -356,6 +406,7 @@ async def generate_request_artifact_definition(
         transform_location = f"{transform.file_path.value}::{transform.class_name.value}"
         convert_query_response = transform.convert_query_response.value

+    batch = await client.create_batch()
     for relationship in group.members.peers:
         member = relationship.peer
         artifact_id = artifacts_by_member.get(member.id)
@@ -376,6 +427,7 @@ async def generate_request_artifact_definition(
             repository_kind=repository.get_kind(),
             branch_name=model.branch,
             query=query.name.value,
+            query_id=query.id,
             variables=await member.extract(params=artifact_definition.parameters.value),
             target_id=member.id,
             target_name=member.display_label,
@@ -385,10 +437,16 @@ async def generate_request_artifact_definition(
             context=context,
         )

+        batch.add(
+            task=get_workflow().submit_workflow,
+            workflow=REQUEST_ARTIFACT_GENERATE,
+            context=context,
+            parameters={"model": request_artifact_generate_model},
         )

+    async for _, _ in batch.execute():
+        pass
+

 @flow(name="git-repository-pull-read-only", flow_run_name="Pull latest commit on {model.repository_name}")
 async def pull_read_only(model: GitRepositoryPullReadOnly) -> None:
@@ -569,9 +627,7 @@ async def trigger_repository_user_checks_definitions(model: UserCheckDefinitionData

     if definition.targets.id:
         # Check against a group of targets
-        await definition
-        group = definition.targets.peer
-        await group.members.fetch()
+        group = await fetch_check_definition_targets(client=client, branch=model.branch_name, definition=definition)
         check_models = []
         for relationship in group.members.peers:
             member = relationship.peer
infrahub/git/utils.py
CHANGED
@@ -1,9 +1,16 @@
-from
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any
+
+from infrahub_sdk import InfrahubClient
+from infrahub_sdk.node import RelationshipManager
+from infrahub_sdk.protocols import CoreArtifactDefinition, CoreCheckDefinition, CoreGroup
+from infrahub_sdk.types import Order

 from infrahub.core import registry
 from infrahub.core.constants import InfrahubKind
 from infrahub.core.manager import NodeManager
 from infrahub.database import InfrahubDatabase
+from infrahub.generators.models import ProposedChangeGeneratorDefinition

 from .models import RepositoryBranchInfo, RepositoryData

@@ -46,3 +53,118 @@ async def get_repositories_commit_per_branch(
     )

     return repositories
+
+
+def _collect_parameter_first_segments(params: Any) -> set[str]:
+    segments: set[str] = set()
+
+    def _walk(value: Any) -> None:
+        if isinstance(value, str):
+            segment = value.split("__", 1)[0]
+            if segment:
+                segments.add(segment)
+        elif isinstance(value, dict):
+            for nested in value.values():
+                _walk(nested)
+        elif isinstance(value, (list, tuple, set)):
+            for nested in value:
+                _walk(nested)
+
+    _walk(params)
+    return segments
+
+
+async def _prefetch_group_member_nodes(
+    client: InfrahubClient,
+    members: RelationshipManager,
+    branch: str,
+    required_fields: set[str],
+) -> None:
+    ids_per_kind: dict[str, set[str]] = defaultdict(set)
+    for peer in members.peers:
+        if peer.id and peer.typename:
+            ids_per_kind[peer.typename].add(peer.id)
+
+    if not ids_per_kind:
+        return
+
+    batch = await client.create_batch()
+
+    for kind, ids in ids_per_kind.items():
+        schema = await client.schema.get(kind=kind, branch=branch)
+
+        # FIXME: https://github.com/opsmill/infrahub-sdk-python/pull/205
+        valid_fields = set(schema.attribute_names) | set(schema.relationship_names)
+        keep_relationships = set(schema.relationship_names) & required_fields
+        cleaned_fields = valid_fields - required_fields
+
+        kwargs: dict[str, Any] = {
+            "kind": kind,
+            "ids": list(ids),
+            "branch": branch,
+            "exclude": list(cleaned_fields),
+            "populate_store": True,
+            "order": Order(disable=True),
+        }
+
+        if keep_relationships:
+            kwargs["include"] = list(keep_relationships)
+
+        batch.add(task=client.filters, **kwargs)
+
+    async for _ in batch.execute():
+        pass
+
+
+async def _fetch_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    group_id: str,
+    parameters: Any,
+) -> CoreGroup:
+    group = await client.get(
+        kind=CoreGroup,
+        id=group_id,
+        branch=branch,
+        include=["members"],
+    )
+
+    parameter_fields = _collect_parameter_first_segments(parameters)
+    await _prefetch_group_member_nodes(
+        client=client,
+        members=group.members,
+        branch=branch,
+        required_fields=parameter_fields,
+    )
+
+    return group
+
+
+async def fetch_artifact_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: CoreArtifactDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.targets.id, parameters=definition.parameters.value
+    )
+
+
+async def fetch_check_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: CoreCheckDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.targets.id, parameters=definition.parameters.value
+    )
+
+
+async def fetch_proposed_change_generator_definition_targets(
+    client: InfrahubClient,
+    branch: str,
+    definition: ProposedChangeGeneratorDefinition,
+) -> CoreGroup:
+    return await _fetch_definition_targets(
+        client=client, branch=branch, group_id=definition.group_id, parameters=definition.parameters
+    )
infrahub/graphql/analyzer.py
CHANGED
@@ -639,7 +639,7 @@ class InfrahubGraphQLQueryAnalyzer(GraphQLQueryAnalyzer):
         self, node: InlineFragmentNode, query_node: GraphQLQueryNode
     ) -> GraphQLQueryNode:
         context_type = query_node.context_type
-        infrahub_model = self.schema_branch.get(name=node.type_condition.name.value)
+        infrahub_model = self.schema_branch.get(name=node.type_condition.name.value, duplicate=False)
         context_type = ContextType.DIRECT
         current_node = GraphQLQueryNode(
             parent=query_node,
infrahub/graphql/api/endpoints.py
CHANGED
@@ -2,14 +2,15 @@ from __future__ import annotations

 from typing import TYPE_CHECKING

-from fastapi import APIRouter, Depends
+from fastapi import APIRouter, Depends, Query
 from fastapi.responses import PlainTextResponse
-from graphql import print_schema
+from graphql import parse, print_ast, print_schema
 from starlette.routing import Route, WebSocketRoute

 from infrahub.api.dependencies import get_branch_dep, get_current_user
 from infrahub.core import registry
 from infrahub.graphql.registry import registry as graphql_registry
+from infrahub.graphql.schema_sort import sort_schema_ast

 from .dependencies import build_graphql_app

@@ -27,11 +28,20 @@ router.routes.append(WebSocketRoute(path="/graphql", endpoint=graphql_app))
 router.routes.append(WebSocketRoute(path="/graphql/{branch_name:str}", endpoint=graphql_app))


-@router.get("/schema.graphql"
+@router.get("/schema.graphql")
 async def get_graphql_schema(
-    branch: Branch = Depends(get_branch_dep),
+    branch: Branch = Depends(get_branch_dep),
+    _: AccountSession = Depends(get_current_user),
+    sort_schema: bool = Query(default=False, alias="sorted", description="Whether to sort the schema alphabetically."),
 ) -> PlainTextResponse:
     schema_branch = registry.schema.get_schema_branch(name=branch.name)
     gqlm = graphql_registry.get_manager_for_branch(branch=branch, schema_branch=schema_branch)
     graphql_schema = gqlm.get_graphql_schema()
+
+    if sort_schema:
+        schema_str = print_schema(graphql_schema)
+        schema_ast = parse(schema_str)
+        sorted_schema_ast = sort_schema_ast(schema_ast)
+        return PlainTextResponse(content=print_ast(sorted_schema_ast))
+
     return PlainTextResponse(content=print_schema(graphql_schema))