infrahub-server 1.5.0b0-py3-none-any.whl → 1.5.0b1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/cli/db.py +24 -0
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/core/attribute.py +3 -3
- infrahub/core/branch/tasks.py +2 -1
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/graph/__init__.py +4 -0
- infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +97 -0
- infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +86 -0
- infrahub/core/migrations/schema/node_attribute_add.py +5 -2
- infrahub/core/migrations/shared.py +5 -6
- infrahub/core/node/__init__.py +142 -40
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/node.py +14 -1
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +14 -1
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +134 -0
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +186 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +31 -15
- infrahub/git/integrator.py +22 -14
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/mutations/display_label.py +111 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +118 -0
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +185 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +15 -4
- infrahub/middleware.py +26 -1
- infrahub/proposed_change/tasks.py +10 -1
- infrahub/server.py +16 -3
- infrahub/services/__init__.py +8 -5
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/tasks.py +3 -0
- infrahub/workflows/catalogue.py +72 -0
- infrahub/workflows/initialization.py +16 -0
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +6 -2
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +38 -0
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/object.py +84 -10
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/METADATA +5 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/RECORD +104 -79
- infrahub_testcontainers/container.py +1 -1
- infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b1.dist-info}/entry_points.txt +0 -0
infrahub/server.py
CHANGED
@@ -10,7 +10,6 @@ from asgi_correlation_id import CorrelationIdMiddleware
 from asgi_correlation_id.context import correlation_id
 from fastapi import FastAPI, Request, Response
 from fastapi.logger import logger
-from fastapi.middleware.gzip import GZipMiddleware
 from fastapi.responses import RedirectResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
@@ -30,7 +29,7 @@ from infrahub.exceptions import Error, ValidationError
 from infrahub.graphql.api.endpoints import router as graphql_router
 from infrahub.lock import initialize_lock
 from infrahub.log import clear_log_context, get_logger, set_log_data
-from infrahub.middleware import InfrahubCORSMiddleware
+from infrahub.middleware import ConditionalGZipMiddleware, InfrahubCORSMiddleware
 from infrahub.services import InfrahubServices
 from infrahub.trace import add_span_exception, configure_trace, get_traceid
 from infrahub.worker import WORKER_IDENTITY
@@ -86,8 +85,12 @@ async def app_initialization(application: FastAPI, enable_scheduler: bool = True
     async with application.state.db.start_session() as db:
         await initialization(db=db, add_database_indexes=True)
 
+    # Initialize the workflow after the registry has been setup
+    await service.initialize_workflow()
+
     application.state.service = service
     application.state.response_delay = config.SETTINGS.miscellaneous.response_delay
+
     if enable_scheduler:
         await service.scheduler.start_schedule()
@@ -184,7 +187,17 @@ app.add_middleware(
     skip_paths=["/health"],
 )
 app.add_middleware(InfrahubCORSMiddleware)
-app.add_middleware(
+app.add_middleware(
+    ConditionalGZipMiddleware,
+    minimum_size=100_000,
+    compresslevel=1,
+    include_paths=(
+        "/assets",
+        "/favicons",
+        "/docs",
+        "/api/schema",
+    ),
+)
 
 app.add_exception_handler(Error, generic_api_exception_handler)
 app.add_exception_handler(TimestampFormatError, partial(generic_api_exception_handler, http_code=400))
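Note: the diff above only shows the call site; the implementation of ConditionalGZipMiddleware ships in infrahub/middleware.py (+26 -1 in the file list) and is not expanded here. As a rough sketch of what a path-conditional gzip middleware with these parameters could look like (only the parameter names come from the call above; the body is an assumption, delegating to Starlette's stock GZipMiddleware):

from starlette.middleware.gzip import GZipMiddleware
from starlette.types import ASGIApp, Receive, Scope, Send


class ConditionalGZipMiddleware:
    """Hypothetical sketch: gzip only requests whose path matches a known prefix."""

    def __init__(
        self,
        app: ASGIApp,
        minimum_size: int = 500,
        compresslevel: int = 9,
        include_paths: tuple[str, ...] = (),
    ) -> None:
        self.app = app
        self.include_paths = include_paths
        # Delegate the actual compression to Starlette's implementation.
        self.gzip = GZipMiddleware(app, minimum_size=minimum_size, compresslevel=compresslevel)

    async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
        # str.startswith accepts a tuple of prefixes; an empty tuple matches nothing.
        if scope["type"] == "http" and scope.get("path", "").startswith(self.include_paths):
            await self.gzip(scope, receive, send)
        else:
            await self.app(scope, receive, send)

Restricting compression to static assets, docs, and the schema endpoint presumably keeps per-request compression cost off the hot GraphQL path, which also fits the low compresslevel=1 chosen above.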
infrahub/services/__init__.py
CHANGED
@@ -110,14 +110,17 @@ class InfrahubServices:
         # This circular dependency could be removed if InfrahubScheduler only depends on what it needs.
         scheduler.service = service
 
-
-
+        return service
+
+    async def initialize_workflow(self) -> None:
+        if self.workflow is not None and isinstance(self.workflow, WorkflowWorkerExecution):
+            assert self.component is not None
             # Ideally `WorkflowWorkerExecution.initialize` would be directly part of WorkflowWorkerExecution
             # constructor but this requires some redesign as it depends on InfrahubComponent which is instantiated
             # after workflow instantiation.
-            await
-
-
+            await self.component.refresh_heartbeat()
+            is_primary = await self.component.is_primary_gunicorn_worker()
+            await self.workflow.initialize(component_is_primary_server=is_primary)
 
     @property
     def component(self) -> InfrahubComponent:
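In short, InfrahubServices.new() now stops at returning the service, and the workflow bring-up moves into the explicit initialize_workflow() step; server.py above calls it only after initialization(db=...), so the registry is populated before the workflow engine (Prefect-backed, judging by the imports in infrahub/workflows/initialization.py below) starts up.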
infrahub/trigger/catalogue.py
CHANGED
@@ -4,6 +4,8 @@ from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
 )
+from infrahub.display_labels.triggers import TRIGGER_DISPLAY_LABELS_ALL_SCHEMA
+from infrahub.hfid.triggers import TRIGGER_HFID_ALL_SCHEMA
 from infrahub.schema.triggers import TRIGGER_SCHEMA_UPDATED
 from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
@@ -13,6 +15,8 @@ builtin_triggers: list[TriggerDefinition] = [
     TRIGGER_BRANCH_MERGED,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
+    TRIGGER_DISPLAY_LABELS_ALL_SCHEMA,
+    TRIGGER_HFID_ALL_SCHEMA,
     TRIGGER_SCHEMA_UPDATED,
     TRIGGER_WEBHOOK_DELETE,
     TRIGGER_WEBHOOK_SETUP_UPDATE,
infrahub/trigger/models.py
CHANGED
@@ -37,6 +37,8 @@ class TriggerType(str, Enum):
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
     COMPUTED_ATTR_PYTHON = "computed_attr_python"
     COMPUTED_ATTR_PYTHON_QUERY = "computed_attr_python_query"
+    DISPLAY_LABEL_JINJA2 = "display_label_jinja2"
+    HUMAN_FRIENDLY_ID = "human_friendly_id"
     # OBJECT = "object"
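Since TriggerType subclasses str, the two new members compare equal to their literal values and round-trip from plain strings, which is how they can be persisted and matched as text. A self-contained illustration (trimmed to the new members):

from enum import Enum


class TriggerType(str, Enum):
    DISPLAY_LABEL_JINJA2 = "display_label_jinja2"
    HUMAN_FRIENDLY_ID = "human_friendly_id"


assert TriggerType.HUMAN_FRIENDLY_ID == "human_friendly_id"
assert TriggerType("display_label_jinja2") is TriggerType.DISPLAY_LABEL_JINJA2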
infrahub/trigger/tasks.py
CHANGED
@@ -6,6 +6,7 @@ from infrahub.computed_attribute.gather import (
     gather_trigger_computed_attribute_jinja2,
     gather_trigger_computed_attribute_python,
 )
+from infrahub.display_labels.gather import gather_trigger_display_labels_jinja2
 from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.webhook.gather import gather_trigger_webhook
 from infrahub.workers.dependencies import get_database
@@ -18,6 +19,7 @@ async def trigger_configure_all() -> None:
     database = await get_database()
     async with database.start_session() as db:
         webhook_trigger = await gather_trigger_webhook(db=db)
+        display_label_triggers = await gather_trigger_display_labels_jinja2()
         computed_attribute_j2_triggers = await gather_trigger_computed_attribute_jinja2()
         (
             computed_attribute_python_triggers,
@@ -28,6 +30,7 @@ async def trigger_configure_all() -> None:
             computed_attribute_j2_triggers
             + computed_attribute_python_triggers
             + computed_attribute_python_query_triggers
+            + display_label_triggers
             + builtin_triggers
             + webhook_trigger
             + action_rules
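Note that trigger_configure_all only gathers the Jinja2 display-label triggers here; the corresponding HFID triggers are gathered and registered separately in infrahub/workflows/initialization.py below.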
infrahub/workflows/catalogue.py
CHANGED
@@ -323,6 +323,62 @@ COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+DISPLAY_LABELS_PROCESS_JINJA2 = WorkflowDefinition(
+    name="display-label-process-jinja2",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="process_display_label",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+DISPLAY_LABEL_JINJA2_UPDATE_VALUE = WorkflowDefinition(
+    name="display-label-jinja2-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="display_label_jinja2_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+HFID_PROCESS = WorkflowDefinition(
+    name="hfid-process",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="process_hfid",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+HFID_SETUP = WorkflowDefinition(
+    name="hfid-setup",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="hfid_setup",
+)
+
+
+HFID_UPDATE_VALUE = WorkflowDefinition(
+    name="hfid-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="hfid_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+TRIGGER_UPDATE_DISPLAY_LABELS = WorkflowDefinition(
+    name="trigger-update-display-labels",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="trigger_update_display_labels",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+TRIGGER_UPDATE_HFID = WorkflowDefinition(
+    name="trigger-update-hfid",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="trigger_update_hfid",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES = WorkflowDefinition(
     name="trigger_update_jinja2_computed_attributes",
     type=WorkflowType.CORE,
@@ -359,6 +415,14 @@ COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+DISPLAY_LABELS_SETUP_JINJA2 = WorkflowDefinition(
+    name="display-labels-setup-jinja2",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="display_labels_setup_jinja2",
+)
+
+
 QUERY_COMPUTED_ATTRIBUTE_TRANSFORM_TARGETS = WorkflowDefinition(
     name="query-computed-attribute-transform-targets",
     type=WorkflowType.CORE,
@@ -589,6 +653,9 @@ WORKFLOWS = [
     DIFF_REFRESH,
     DIFF_REFRESH_ALL,
     DIFF_UPDATE,
+    DISPLAY_LABELS_PROCESS_JINJA2,
+    DISPLAY_LABELS_SETUP_JINJA2,
+    DISPLAY_LABEL_JINJA2_UPDATE_VALUE,
     GIT_REPOSITORIES_CHECK_ARTIFACT_CREATE,
     GIT_REPOSITORIES_CREATE_BRANCH,
     GIT_REPOSITORIES_DIFF_NAMES_ONLY,
@@ -604,6 +671,9 @@ WORKFLOWS = [
     GIT_REPOSITORY_USER_CHECKS_TRIGGER,
     GIT_REPOSITORY_USER_CHECK_RUN,
     GRAPHQL_QUERY_GROUP_UPDATE,
+    HFID_PROCESS,
+    HFID_SETUP,
+    HFID_UPDATE_VALUE,
     IPAM_RECONCILIATION,
     PROFILE_REFRESH,
     PROFILE_REFRESH_MULTIPLE,
@@ -632,6 +702,8 @@ WORKFLOWS = [
     TRIGGER_ARTIFACT_DEFINITION_GENERATE,
     TRIGGER_CONFIGURE_ALL,
     TRIGGER_GENERATOR_DEFINITION_RUN,
+    TRIGGER_UPDATE_DISPLAY_LABELS,
+    TRIGGER_UPDATE_HFID,
     TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
     TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
     VALIDATE_SCHEMA_NUMBER_POOLS,
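Every WorkflowDefinition added here pairs a dashed workflow name with a module/function path. A quick, hypothetical helper (not part of the diff) for sanity-checking that such a pair resolves to a real callable:

from importlib import import_module


def resolve(module: str, function: str):
    """Return the callable a WorkflowDefinition points at, or raise if it is missing."""
    return getattr(import_module(module), function)


# e.g. for HFID_PROCESS above (requires infrahub to be importable):
# process_hfid = resolve("infrahub.hfid.tasks", "process_hfid")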
infrahub/workflows/initialization.py
CHANGED
@@ -7,6 +7,8 @@ from prefect.exceptions import ObjectAlreadyExists
 from prefect.logging import get_run_logger
 
 from infrahub import config
+from infrahub.display_labels.gather import gather_trigger_display_labels_jinja2
+from infrahub.hfid.gather import gather_trigger_hfid
 from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.trigger.models import TriggerType
 from infrahub.trigger.setup import setup_triggers
@@ -74,3 +76,17 @@ async def setup_task_manager() -> None:
     await setup_triggers(
         client=client, triggers=builtin_triggers, trigger_type=TriggerType.BUILTIN, force_update=True
     )
+    display_label_triggers = await gather_trigger_display_labels_jinja2()
+    await setup_triggers(
+        client=client,
+        triggers=display_label_triggers,
+        trigger_type=TriggerType.DISPLAY_LABEL_JINJA2,
+        force_update=True,
+    )  # type: ignore[misc]
+    hfid_triggers = await gather_trigger_hfid()
+    await setup_triggers(
+        client=client,
+        triggers=hfid_triggers,
+        trigger_type=TriggerType.HUMAN_FRIENDLY_ID,
+        force_update=True,
+    )  # type: ignore[misc]
infrahub_sdk/checks.py
CHANGED
infrahub_sdk/ctl/cli_commands.py
CHANGED
@@ -25,6 +25,7 @@ from ..ctl.check import run as run_check
 from ..ctl.client import initialize_client, initialize_client_sync
 from ..ctl.exceptions import QueryNotFoundError
 from ..ctl.generator import run as run_generator
+from ..ctl.graphql import app as graphql_app
 from ..ctl.menu import app as menu_app
 from ..ctl.object import app as object_app
 from ..ctl.render import list_jinja2_transforms, print_template_errors
@@ -62,6 +63,7 @@ app.add_typer(validate_app, name="validate")
 app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
 app.add_typer(object_app, name="object")
+app.add_typer(graphql_app, name="graphql")
 
 app.command(name="dump")(dump)
 app.command(name="load")(load)
infrahub_sdk/ctl/generator.py
CHANGED
@@ -64,6 +64,8 @@ async def run(
         branch=branch or "",
         params=variables_dict,
         convert_query_response=generator_config.convert_query_response,
+        execute_in_proposed_change=generator_config.execute_in_proposed_change,
+        execute_after_merge=generator_config.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     await generator._init_client.schema.all(branch=generator.branch_name)
@@ -93,6 +95,8 @@ async def run(
         branch=branch or "",
         params=params,
         convert_query_response=generator_config.convert_query_response,
+        execute_in_proposed_change=generator_config.execute_in_proposed_change,
+        execute_after_merge=generator_config.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     data = execute_graphql_query(
infrahub_sdk/ctl/graphql.py
ADDED
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+import ast
+from collections import defaultdict
+from pathlib import Path
+from typing import Optional
+
+import typer
+from ariadne_codegen.client_generators.package import PackageGenerator, get_package_generator
+from ariadne_codegen.exceptions import ParsingError
+from ariadne_codegen.plugins.explorer import get_plugins_types
+from ariadne_codegen.plugins.manager import PluginManager
+from ariadne_codegen.schema import (
+    filter_fragments_definitions,
+    filter_operations_definitions,
+    get_graphql_schema_from_path,
+)
+from ariadne_codegen.settings import ClientSettings, CommentsStrategy
+from ariadne_codegen.utils import ast_to_str
+from graphql import DefinitionNode, GraphQLSchema, NoUnusedFragmentsRule, parse, specified_rules, validate
+from rich.console import Console
+
+from ..async_typer import AsyncTyper
+from ..ctl.client import initialize_client
+from ..ctl.utils import catch_exception
+from ..graphql.utils import insert_fragments_inline, remove_fragment_import
+from .parameters import CONFIG_PARAM
+
+app = AsyncTyper()
+console = Console()
+
+ARIADNE_PLUGINS = [
+    "infrahub_sdk.graphql.plugin.PydanticBaseModelPlugin",
+    "infrahub_sdk.graphql.plugin.FutureAnnotationPlugin",
+    "infrahub_sdk.graphql.plugin.StandardTypeHintPlugin",
+]
+
+
+def find_gql_files(query_path: Path) -> list[Path]:
+    """
+    Find all files with .gql extension in the specified directory.
+
+    Args:
+        query_path: Path to the directory to search for .gql files
+
+    Returns:
+        List of Path objects for all .gql files found
+    """
+    if not query_path.exists():
+        raise FileNotFoundError(f"File or directory not found: {query_path}")
+
+    if not query_path.is_dir() and query_path.is_file():
+        return [query_path]
+
+    return list(query_path.glob("**/*.gql"))
+
+
+def get_graphql_query(queries_path: Path, schema: GraphQLSchema) -> tuple[DefinitionNode, ...]:
+    """Get GraphQL queries definitions from a single GraphQL file."""
+
+    if not queries_path.exists():
+        raise FileNotFoundError(f"File not found: {queries_path}")
+    if not queries_path.is_file():
+        raise ValueError(f"{queries_path} is not a file")
+
+    queries_str = queries_path.read_text(encoding="utf-8")
+    queries_ast = parse(queries_str)
+    validation_errors = validate(
+        schema=schema,
+        document_ast=queries_ast,
+        rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule],
+    )
+    if validation_errors:
+        raise ValueError("\n\n".join(error.message for error in validation_errors))
+    return queries_ast.definitions
+
+
+def generate_result_types(directory: Path, package: PackageGenerator, fragment: ast.Module) -> None:
+    for file_name, module in package._result_types_files.items():
+        file_path = directory / file_name
+
+        insert_fragments_inline(module, fragment)
+        remove_fragment_import(module)
+
+        code = package._add_comments_to_code(ast_to_str(module), package.queries_source)
+        if package.plugin_manager:
+            code = package.plugin_manager.generate_result_types_code(code)
+        file_path.write_text(code)
+        package._generated_files.append(file_path.name)
+
+
+@app.callback()
+def callback() -> None:
+    """
+    Various GraphQL related commands.
+    """
+
+
+@app.command()
+@catch_exception(console=console)
+async def export_schema(
+    destination: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Export the GraphQL schema to a file."""
+
+    client = initialize_client()
+    schema_text = await client.schema.get_graphql_schema()
+
+    destination.parent.mkdir(parents=True, exist_ok=True)
+    destination.write_text(schema_text)
+    console.print(f"[green]Schema exported to {destination}")
+
+
+@app.command()
+@catch_exception(console=console)
+async def generate_return_types(
+    query: Optional[Path] = typer.Argument(
+        None, help="Location of the GraphQL query file(s). Defaults to current directory if not specified."
+    ),
+    schema: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Create Pydantic Models for GraphQL query return types"""
+
+    query = Path.cwd() if query is None else query
+
+    # Load the GraphQL schema
+    if not schema.exists():
+        raise FileNotFoundError(f"GraphQL Schema file not found: {schema}")
+    graphql_schema = get_graphql_schema_from_path(schema_path=str(schema))
+
+    # Initialize the plugin manager
+    plugin_manager = PluginManager(
+        schema=graphql_schema,
+        plugins_types=get_plugins_types(plugins_strs=ARIADNE_PLUGINS),
+    )
+
+    # Find the GraphQL files and organize them by directory
+    gql_files = find_gql_files(query)
+    gql_per_directory: dict[Path, list[Path]] = defaultdict(list)
+    for gql_file in gql_files:
+        gql_per_directory[gql_file.parent].append(gql_file)
+
+    # Generate the Pydantic Models for the GraphQL queries
+    for directory, gql_files in gql_per_directory.items():
+        for gql_file in gql_files:
+            try:
+                definitions = get_graphql_query(queries_path=gql_file, schema=graphql_schema)
+            except ValueError as exc:
+                console.print(f"[red]Error generating result types for {gql_file}: {exc}")
+                continue
+            queries = filter_operations_definitions(definitions)
+            fragments = filter_fragments_definitions(definitions)
+
+            package_generator = get_package_generator(
+                schema=graphql_schema,
+                fragments=fragments,
+                settings=ClientSettings(
+                    schema_path=str(schema),
+                    target_package_name=directory.name,
+                    queries_path=str(directory),
+                    include_comments=CommentsStrategy.NONE,
+                ),
+                plugin_manager=plugin_manager,
+            )
+
+            parsing_failed = False
+            try:
+                for query_operation in queries:
+                    package_generator.add_operation(query_operation)
+            except ParsingError as exc:
+                console.print(f"[red]Unable to process {gql_file.name}: {exc}")
+                parsing_failed = True
+
+            if parsing_failed:
+                continue
+
+            module_fragment = package_generator.fragments_generator.generate()
+
+            generate_result_types(directory=directory, package=package_generator, fragment=module_fragment)
+
+            for file_name in package_generator._result_types_files.keys():
+                console.print(f"[green]Generated {file_name} in {directory}")
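Registered on the CLI in cli_commands.py above, these commands should surface as infrahubctl graphql export-schema and infrahubctl graphql generate-return-types (assuming Typer's usual underscore-to-dash command naming): the first dumps the server's GraphQL schema to a local file, the second runs ariadne-codegen over each directory containing .gql files and writes Pydantic result-type modules alongside them, with the three ARIADNE_PLUGINS rewriting the generated AST.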
infrahub_sdk/ctl/schema.py
CHANGED
@@ -73,7 +73,9 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
             loc_type = loc_path[-1]
             input_str = error.get("input", None)
             error_message = f"{loc_type} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
         elif len(loc_path) > 6:
             loc_type = loc_path[5]
@@ -91,7 +93,9 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
 
             input_str = error.get("input", None)
             error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
 
 def handle_non_detail_errors(response: dict[str, Any]) -> None:
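The substantive change here is the markup=False flag: schema error messages routinely contain square brackets (list[str], [0], and so on), which Rich would otherwise try to interpret as console markup. A minimal standalone demonstration of the flag:

from rich.console import Console

console = Console()
# Without markup=False, Rich parses [bracketed] segments as style tags;
# with it, the message is printed verbatim.
console.print("Node: InfraDevice | value is not a valid list[str]", markup=False)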
infrahub_sdk/generator.py
CHANGED
@@ -26,6 +26,8 @@ class InfrahubGenerator(InfrahubOperation):
         generator_instance: str = "",
         params: dict | None = None,
         convert_query_response: bool = False,
+        execute_in_proposed_change: bool = True,
+        execute_after_merge: bool = True,
         logger: logging.Logger | None = None,
         request_context: RequestContext | None = None,
     ) -> None:
@@ -44,6 +46,8 @@ class InfrahubGenerator(InfrahubOperation):
         self._client: InfrahubClient | None = None
         self.logger = logger if logger else logging.getLogger("infrahub.tasks")
         self.request_context = request_context
+        self.execute_in_proposed_change = execute_in_proposed_change
+        self.execute_after_merge = execute_after_merge
 
     @property
     def subscribers(self) -> list[str] | None:
@@ -81,8 +85,10 @@ class InfrahubGenerator(InfrahubOperation):
         unpacked = data.get("data") or data
         await self.process_nodes(data=unpacked)
 
+        group_type = "CoreGeneratorGroup" if self.execute_after_merge else "CoreGeneratorAwareGroup"
+
         async with self._init_client.start_tracking(
-            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type=
+            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type=group_type
         ) as self.client:
             await self.generate(data=unpacked)
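In isolation, the group selection added above reads as follows; the interpretation of the two kinds is an inference, with CoreGeneratorAwareGroup presumably being the kind added in infrahub/core/constants/infrahubkind.py (+1 in the file list):

def pick_group_type(execute_after_merge: bool) -> str:
    # True keeps the existing behaviour; opting out of post-merge execution
    # tracks the generator's members in the new group kind instead.
    return "CoreGeneratorGroup" if execute_after_merge else "CoreGeneratorAwareGroup"


assert pick_group_type(True) == "CoreGeneratorGroup"
assert pick_group_type(False) == "CoreGeneratorAwareGroup"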
infrahub_sdk/graphql/__init__.py
ADDED
@@ -0,0 +1,12 @@
+from .constants import VARIABLE_TYPE_MAPPING
+from .query import Mutation, Query
+from .renderers import render_input_block, render_query_block, render_variables_to_string
+
+__all__ = [
+    "VARIABLE_TYPE_MAPPING",
+    "Mutation",
+    "Query",
+    "render_input_block",
+    "render_query_block",
+    "render_variables_to_string",
+]
infrahub_sdk/graphql/constants.py
ADDED
@@ -0,0 +1 @@
+VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!"))
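A tuple of pairs rather than a dict suggests the consumers scan it in order. One detail worth noting for any lookup built on it: bool is a subclass of int, so an exact type check is needed to avoid mapping True to "Int!". A hypothetical helper (using the constant above) illustrating the direction of the lookup:

def graphql_type_for(value: object) -> str:
    for py_type, gql_type in VARIABLE_TYPE_MAPPING:
        if type(value) is py_type:  # exact match, so True -> "Boolean!" not "Int!"
            return gql_type
    raise ValueError(f"Unsupported variable type: {type(value)}")


assert graphql_type_for(5) == "Int!"
assert graphql_type_for(True) == "Boolean!"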
infrahub_sdk/graphql/plugin.py
ADDED
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+import ast
+from typing import TYPE_CHECKING
+
+from ariadne_codegen.plugins.base import Plugin
+
+if TYPE_CHECKING:
+    from graphql import ExecutableDefinitionNode
+
+
+class FutureAnnotationPlugin(Plugin):
+    @staticmethod
+    def insert_future_annotation(module: ast.Module) -> ast.Module:
+        # First check if the future annotation is already present
+        for item in module.body:
+            if isinstance(item, ast.ImportFrom) and item.module == "__future__":
+                if any(alias.name == "annotations" for alias in item.names):
+                    return module
+
+        module.body.insert(0, ast.ImportFrom(module="__future__", names=[ast.alias(name="annotations")], level=0))
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        return self.insert_future_annotation(module)
+
+
+class StandardTypeHintPlugin(Plugin):
+    @classmethod
+    def replace_list_in_subscript(cls, subscript: ast.Subscript) -> ast.Subscript:
+        if isinstance(subscript.value, ast.Name) and subscript.value.id == "List":
+            subscript.value.id = "list"
+        if isinstance(subscript.slice, ast.Subscript):
+            subscript.slice = cls.replace_list_in_subscript(subscript.slice)
+
+        return subscript
+
+    @classmethod
+    def replace_list_annotations(cls, module: ast.Module) -> ast.Module:
+        for item in module.body:
+            if not isinstance(item, ast.ClassDef):
+                continue
+
+            # replace List with list in the annotations when list is used as a type
+            for class_item in item.body:
+                if not isinstance(class_item, ast.AnnAssign):
+                    continue
+                if isinstance(class_item.annotation, ast.Subscript):
+                    class_item.annotation = cls.replace_list_in_subscript(class_item.annotation)
+
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        module = FutureAnnotationPlugin.insert_future_annotation(module)
+        return self.replace_list_annotations(module)
+
+
+class PydanticBaseModelPlugin(Plugin):
+    @staticmethod
+    def find_base_model_index(module: ast.Module) -> int:
+        for idx, item in enumerate(module.body):
+            if isinstance(item, ast.ImportFrom) and item.module == "base_model":
+                return idx
+        raise ValueError("BaseModel not found in module")
+
+    @classmethod
+    def replace_base_model_import(cls, module: ast.Module) -> ast.Module:
+        base_model_index = cls.find_base_model_index(module)
+        module.body[base_model_index] = ast.ImportFrom(module="pydantic", names=[ast.alias(name="BaseModel")], level=0)
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        return self.replace_base_model_import(module)
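A quick standalone check of what FutureAnnotationPlugin does to a generated module; only the staticmethod is exercised, so no ariadne-codegen run is required:

import ast

from infrahub_sdk.graphql.plugin import FutureAnnotationPlugin

module = ast.parse("x: int = 1")
FutureAnnotationPlugin.insert_future_annotation(module)
print(ast.unparse(module))
# from __future__ import annotations
# x: int = 1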