infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/tasks.py +8 -0
- infrahub/api/diff/diff.py +1 -1
- infrahub/api/internal.py +2 -0
- infrahub/api/oauth2.py +13 -19
- infrahub/api/oidc.py +15 -21
- infrahub/api/schema.py +24 -3
- infrahub/artifacts/models.py +2 -1
- infrahub/auth.py +137 -3
- infrahub/cli/__init__.py +2 -0
- infrahub/cli/db.py +103 -98
- infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
- infrahub/cli/dev.py +118 -0
- infrahub/cli/tasks.py +46 -0
- infrahub/cli/upgrade.py +30 -3
- infrahub/computed_attribute/tasks.py +20 -8
- infrahub/core/attribute.py +13 -5
- infrahub/core/branch/enums.py +1 -1
- infrahub/core/branch/models.py +7 -3
- infrahub/core/branch/tasks.py +70 -8
- infrahub/core/changelog/models.py +4 -12
- infrahub/core/constants/__init__.py +3 -0
- infrahub/core/constants/infrahubkind.py +1 -0
- infrahub/core/diff/model/path.py +4 -0
- infrahub/core/diff/payload_builder.py +1 -1
- infrahub/core/diff/query/artifact.py +1 -0
- infrahub/core/diff/query/field_summary.py +1 -0
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/initialization.py +5 -2
- infrahub/core/ipam/utilization.py +1 -1
- infrahub/core/manager.py +6 -3
- infrahub/core/migrations/__init__.py +3 -0
- infrahub/core/migrations/exceptions.py +4 -0
- infrahub/core/migrations/graph/__init__.py +12 -11
- infrahub/core/migrations/graph/load_schema_branch.py +21 -0
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
- infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
- infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
- infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
- infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
- infrahub/core/migrations/query/__init__.py +7 -8
- infrahub/core/migrations/query/attribute_add.py +8 -6
- infrahub/core/migrations/query/attribute_remove.py +134 -0
- infrahub/core/migrations/runner.py +54 -0
- infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
- infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
- infrahub/core/migrations/schema/node_attribute_add.py +35 -4
- infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
- infrahub/core/migrations/schema/node_kind_update.py +2 -1
- infrahub/core/migrations/schema/node_remove.py +2 -1
- infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
- infrahub/core/migrations/shared.py +52 -19
- infrahub/core/node/__init__.py +158 -51
- infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
- infrahub/core/node/create.py +46 -63
- infrahub/core/node/lock_utils.py +70 -44
- infrahub/core/node/node_property_attribute.py +230 -0
- infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
- infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
- infrahub/core/node/resource_manager/number_pool.py +2 -1
- infrahub/core/node/standard.py +1 -1
- infrahub/core/protocols.py +7 -1
- infrahub/core/query/attribute.py +55 -0
- infrahub/core/query/ipam.py +1 -0
- infrahub/core/query/node.py +23 -4
- infrahub/core/query/relationship.py +1 -0
- infrahub/core/registry.py +2 -2
- infrahub/core/relationship/constraints/count.py +1 -1
- infrahub/core/relationship/model.py +1 -1
- infrahub/core/schema/__init__.py +56 -0
- infrahub/core/schema/attribute_schema.py +4 -0
- infrahub/core/schema/basenode_schema.py +42 -2
- infrahub/core/schema/definitions/core/__init__.py +2 -0
- infrahub/core/schema/definitions/core/generator.py +2 -0
- infrahub/core/schema/definitions/core/group.py +16 -2
- infrahub/core/schema/definitions/internal.py +16 -3
- infrahub/core/schema/generated/attribute_schema.py +2 -2
- infrahub/core/schema/generated/base_node_schema.py +6 -1
- infrahub/core/schema/manager.py +22 -1
- infrahub/core/schema/node_schema.py +5 -2
- infrahub/core/schema/schema_branch.py +300 -8
- infrahub/core/schema/schema_branch_display.py +123 -0
- infrahub/core/schema/schema_branch_hfid.py +114 -0
- infrahub/core/validators/aggregated_checker.py +1 -1
- infrahub/core/validators/determiner.py +12 -1
- infrahub/core/validators/relationship/peer.py +1 -1
- infrahub/core/validators/tasks.py +1 -1
- infrahub/database/graph.py +21 -0
- infrahub/display_labels/__init__.py +0 -0
- infrahub/display_labels/gather.py +48 -0
- infrahub/display_labels/models.py +240 -0
- infrahub/display_labels/tasks.py +192 -0
- infrahub/display_labels/triggers.py +22 -0
- infrahub/events/branch_action.py +27 -1
- infrahub/events/group_action.py +1 -1
- infrahub/events/node_action.py +1 -1
- infrahub/generators/constants.py +7 -0
- infrahub/generators/models.py +7 -0
- infrahub/generators/tasks.py +34 -22
- infrahub/git/base.py +4 -1
- infrahub/git/integrator.py +23 -15
- infrahub/git/models.py +2 -1
- infrahub/git/repository.py +22 -5
- infrahub/git/tasks.py +66 -10
- infrahub/git/utils.py +123 -1
- infrahub/graphql/analyzer.py +1 -1
- infrahub/graphql/api/endpoints.py +14 -4
- infrahub/graphql/manager.py +4 -9
- infrahub/graphql/mutations/convert_object_type.py +11 -1
- infrahub/graphql/mutations/display_label.py +118 -0
- infrahub/graphql/mutations/generator.py +25 -7
- infrahub/graphql/mutations/hfid.py +125 -0
- infrahub/graphql/mutations/ipam.py +54 -35
- infrahub/graphql/mutations/main.py +27 -28
- infrahub/graphql/mutations/relationship.py +2 -2
- infrahub/graphql/mutations/resource_manager.py +2 -2
- infrahub/graphql/mutations/schema.py +5 -5
- infrahub/graphql/queries/resource_manager.py +1 -1
- infrahub/graphql/resolvers/resolver.py +2 -0
- infrahub/graphql/schema.py +4 -0
- infrahub/graphql/schema_sort.py +170 -0
- infrahub/graphql/types/branch.py +4 -1
- infrahub/graphql/types/enums.py +3 -0
- infrahub/groups/tasks.py +1 -1
- infrahub/hfid/__init__.py +0 -0
- infrahub/hfid/gather.py +48 -0
- infrahub/hfid/models.py +240 -0
- infrahub/hfid/tasks.py +191 -0
- infrahub/hfid/triggers.py +22 -0
- infrahub/lock.py +67 -16
- infrahub/message_bus/types.py +2 -1
- infrahub/middleware.py +26 -1
- infrahub/permissions/constants.py +2 -0
- infrahub/proposed_change/tasks.py +35 -17
- infrahub/server.py +21 -4
- infrahub/services/__init__.py +8 -5
- infrahub/services/adapters/http/__init__.py +5 -0
- infrahub/services/adapters/workflow/worker.py +14 -3
- infrahub/task_manager/event.py +5 -0
- infrahub/task_manager/models.py +7 -0
- infrahub/task_manager/task.py +73 -0
- infrahub/trigger/catalogue.py +4 -0
- infrahub/trigger/models.py +2 -0
- infrahub/trigger/setup.py +13 -4
- infrahub/trigger/tasks.py +6 -0
- infrahub/workers/dependencies.py +10 -1
- infrahub/workers/infrahub_async.py +10 -2
- infrahub/workflows/catalogue.py +80 -0
- infrahub/workflows/initialization.py +21 -0
- infrahub/workflows/utils.py +2 -1
- infrahub_sdk/checks.py +1 -1
- infrahub_sdk/client.py +13 -10
- infrahub_sdk/config.py +29 -2
- infrahub_sdk/ctl/cli_commands.py +2 -0
- infrahub_sdk/ctl/generator.py +4 -0
- infrahub_sdk/ctl/graphql.py +184 -0
- infrahub_sdk/ctl/schema.py +28 -9
- infrahub_sdk/generator.py +7 -1
- infrahub_sdk/graphql/__init__.py +12 -0
- infrahub_sdk/graphql/constants.py +1 -0
- infrahub_sdk/graphql/plugin.py +85 -0
- infrahub_sdk/graphql/query.py +77 -0
- infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
- infrahub_sdk/graphql/utils.py +40 -0
- infrahub_sdk/protocols.py +14 -0
- infrahub_sdk/schema/__init__.py +70 -4
- infrahub_sdk/schema/repository.py +8 -0
- infrahub_sdk/spec/models.py +7 -0
- infrahub_sdk/spec/object.py +53 -44
- infrahub_sdk/spec/processors/__init__.py +0 -0
- infrahub_sdk/spec/processors/data_processor.py +10 -0
- infrahub_sdk/spec/processors/factory.py +34 -0
- infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
- infrahub_sdk/spec/range_expansion.py +1 -1
- infrahub_sdk/transforms.py +1 -1
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
- infrahub_testcontainers/container.py +115 -3
- infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
- infrahub_testcontainers/docker-compose.test.yml +6 -1
- infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
- {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/services/__init__.py
CHANGED
@@ -110,14 +110,17 @@ class InfrahubServices:
         # This circular dependency could be removed if InfrahubScheduler only depends on what it needs.
         scheduler.service = service

-
-
+        return service
+
+    async def initialize_workflow(self, is_initial_setup: bool = False) -> None:
+        if self.workflow is not None and isinstance(self.workflow, WorkflowWorkerExecution):
+            assert self.component is not None
             # Ideally `WorkflowWorkerExecution.initialize` would be directly part of WorkflowWorkerExecution
             # constructor but this requires some redesign as it depends on InfrahubComponent which is instantiated
             # after workflow instantiation.
-            await
-
-
+            await self.component.refresh_heartbeat()
+            is_primary = await self.component.is_primary_gunicorn_worker()
+            await self.workflow.initialize(component_is_primary_server=is_primary, is_initial_setup=is_initial_setup)

     @property
     def component(self) -> InfrahubComponent:
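The new `initialize_workflow` helper moves the Prefect/workflow bootstrap out of service construction. A minimal usage sketch; the surrounding startup function and the `first_boot` flag are assumptions, only the `initialize_workflow(is_initial_setup=...)` signature comes from the diff above:

from infrahub.services import InfrahubServices


async def start_services(services: InfrahubServices, first_boot: bool = False) -> None:
    # On a worker using WorkflowWorkerExecution this refreshes the component heartbeat,
    # checks whether this is the primary gunicorn worker, and initializes the task manager.
    # With is_initial_setup=True the identifier (display label / HFID) triggers are installed as well.
    await services.initialize_workflow(is_initial_setup=first_boot)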
infrahub/services/adapters/http/__init__.py
CHANGED
@@ -3,10 +3,15 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any

 if TYPE_CHECKING:
+    import ssl
+
     import httpx


 class InfrahubHTTP:
+    def verify_tls(self, verify: bool | None = None) -> bool | ssl.SSLContext:
+        raise NotImplementedError()
+
     async def get(
         self,
         url: str,
infrahub/services/adapters/workflow/worker.py
CHANGED
@@ -3,10 +3,12 @@ from __future__ import annotations
 from typing import TYPE_CHECKING, Any, overload

 from prefect.client.schemas.objects import StateType
+from prefect.context import AsyncClientContext
 from prefect.deployments import run_deployment

+from infrahub.services.adapters.http.httpx import HttpxAdapter
 from infrahub.workers.utils import inject_context_parameter
-from infrahub.workflows.initialization import setup_task_manager
+from infrahub.workflows.initialization import setup_task_manager, setup_task_manager_identifiers
 from infrahub.workflows.models import WorkflowInfo

 from . import InfrahubWorkflow, Return
@@ -19,11 +21,19 @@ if TYPE_CHECKING:


 class WorkflowWorkerExecution(InfrahubWorkflow):
+    # This is required to grab a cached SSLContext from the HttpAdapter.
+    # We cannot use the get_http() dependency since it introduces a circular dependency.
+    # We could remove this later on by introducing a cached SSLContext outside of this adapter.
+    _http_adapter = HttpxAdapter()
+
     @staticmethod
-    async def initialize(component_is_primary_server: bool) -> None:
+    async def initialize(component_is_primary_server: bool, is_initial_setup: bool = False) -> None:
         if component_is_primary_server:
             await setup_task_manager()

+        if is_initial_setup:
+            await setup_task_manager_identifiers()
+
     @overload
     async def execute_workflow(
         self,
@@ -79,5 +89,6 @@ class WorkflowWorkerExecution(InfrahubWorkflow):
         parameters = dict(parameters) if parameters is not None else {}
         inject_context_parameter(func=flow_func, parameters=parameters, context=context)

-
+        async with AsyncClientContext(httpx_settings={"verify": self._http_adapter.verify_tls()}):
+            flow_run = await run_deployment(name=workflow.full_name, timeout=0, parameters=parameters or {}, tags=tags)  # type: ignore[return-value, misc]
         return WorkflowInfo.from_flow(flow_run=flow_run)
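The `AsyncClientContext(httpx_settings={"verify": ...})` wrapper appears intended to make Prefect API calls issued inside the block reuse the adapter's TLS verification settings. A minimal sketch of the same pattern, assuming a stand-in `verify_tls()` helper that just builds a default `ssl.SSLContext` (the real code pulls it from the cached `HttpxAdapter`):

import ssl

from prefect.context import AsyncClientContext
from prefect.deployments import run_deployment


def verify_tls() -> ssl.SSLContext:
    # Stand-in for HttpxAdapter.verify_tls(); a custom CA bundle could be loaded here.
    return ssl.create_default_context()


async def trigger_deployment(name: str) -> None:
    # Prefect clients created inside this context are built with the supplied httpx settings,
    # so run_deployment talks to the API using the same TLS verification.
    async with AsyncClientContext(httpx_settings={"verify": verify_tls()}):
        await run_deployment(name=name, timeout=0)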
infrahub/task_manager/event.py
CHANGED
@@ -160,6 +160,9 @@ class PrefectEventData(PrefectEventModel):
     def _return_branch_rebased(self) -> dict[str, Any]:
         return {"rebased_branch": self._get_branch_name_from_resource()}

+    def _return_branch_migrated(self) -> dict[str, Any]:
+        return {"migrated_branch": self._get_branch_name_from_resource()}
+
     def _return_group_event(self) -> dict[str, Any]:
         members = []
         ancestors = []
@@ -228,6 +231,8 @@ class PrefectEventData(PrefectEventModel):
                 event_specifics = self._return_branch_deleted()
             case "infrahub.branch.merged":
                 event_specifics = self._return_branch_merged()
+            case "infrahub.branch.migrated":
+                event_specifics = self._return_branch_migrated()
             case "infrahub.branch.rebased":
                 event_specifics = self._return_branch_rebased()
             case "infrahub.group.member_added" | "infrahub.group.member_removed":
infrahub/task_manager/models.py
CHANGED
@@ -141,6 +141,13 @@ class InfrahubEventFilter(EventFilter):
             if branches:
                 self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))

+        if branch_migrated := event_type_filter.get("branch_migrated"):
+            branches = branch_migrated.get("branches") or []
+            if "infrahub.branch.created" not in event_type:
+                event_type.append("infrahub.branch.migrated")
+            if branches:
+                self.resource = EventResourceFilter(labels=ResourceSpecification({"infrahub.branch.name": branches}))
+
         if branch_rebased := event_type_filter.get("branch_rebased"):
             branches = branch_rebased.get("branches") or []
             if "infrahub.branch.created" not in event_type:
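For reference, the new `branch_migrated` entry follows the same shape as the existing branch filters. A hypothetical filter payload, with branch names invented for illustration, would look like:

# Hypothetical event_type_filter payload matching the new branch_migrated handling:
# subscribe to infrahub.branch.migrated events, optionally limited to named branches.
event_type_filter = {
    "branch_migrated": {"branches": ["main", "feature-dc2"]},
}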
infrahub/task_manager/task.py
CHANGED
@@ -1,7 +1,10 @@
+import asyncio
 import uuid
+from datetime import datetime, timedelta, timezone
 from typing import Any
 from uuid import UUID

+from prefect import State
 from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.filters import (
     ArtifactFilter,
@@ -12,6 +15,7 @@ from prefect.client.schemas.filters import (
     FlowRunFilter,
     FlowRunFilterId,
     FlowRunFilterName,
+    FlowRunFilterStartTime,
     FlowRunFilterState,
     FlowRunFilterStateType,
     FlowRunFilterTags,
@@ -311,3 +315,72 @@ class PrefectTask:
         )

         return {"count": count or 0, "edges": nodes}
+
+    @classmethod
+    async def delete_flow_runs(
+        cls,
+        states: list[StateType] = [StateType.COMPLETED, StateType.FAILED, StateType.CANCELLED],  # noqa: B006
+        delete: bool = True,
+        days_to_keep: int = 2,
+        batch_size: int = 100,
+    ) -> None:
+        """Delete flow runs in the specified states and older than specified days."""
+
+        logger = get_logger()
+
+        async with get_client(sync_client=False) as client:
+            cutoff = datetime.now(timezone.utc) - timedelta(days=days_to_keep)
+
+            flow_run_filter = FlowRunFilter(
+                start_time=FlowRunFilterStartTime(before_=cutoff),  # type: ignore[arg-type]
+                state=FlowRunFilterState(type=FlowRunFilterStateType(any_=states)),
+            )
+
+            # Get flow runs to delete
+            flow_runs = await client.read_flow_runs(flow_run_filter=flow_run_filter, limit=batch_size)
+
+            deleted_total = 0
+
+            while True:
+                batch_deleted = 0
+                failed_deletes = []
+
+                # Delete each flow run through the API
+                for flow_run in flow_runs:
+                    try:
+                        if delete:
+                            await client.delete_flow_run(flow_run_id=flow_run.id)
+                        else:
+                            await client.set_flow_run_state(
+                                flow_run_id=flow_run.id,
+                                state=State(type=StateType.CRASHED),
+                                force=True,
+                            )
+                        deleted_total += 1
+                        batch_deleted += 1
+                    except Exception as e:
+                        logger.warning(f"Failed to delete flow run {flow_run.id}: {e}")
+                        failed_deletes.append(flow_run.id)

+                    # Rate limiting
+                    if batch_deleted % 10 == 0:
+                        await asyncio.sleep(0.5)
+
+                logger.info(f"Delete {batch_deleted}/{len(flow_runs)} flow runs (total: {deleted_total})")
+
+                # Get next batch
+                previous_flow_run_ids = [fr.id for fr in flow_runs]
+                flow_runs = await client.read_flow_runs(flow_run_filter=flow_run_filter, limit=batch_size)
+
+                if not flow_runs:
+                    logger.info("No more flow runs to delete")
+                    break
+
+                if previous_flow_run_ids == [fr.id for fr in flow_runs]:
+                    logger.info("Found same flow runs to delete, aborting")
+                    break
+
+                # Delay between batches to avoid overwhelming the API
+                await asyncio.sleep(1.0)
+
+            logger.info(f"Retention complete. Total deleted tasks: {deleted_total}")
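`PrefectTask.delete_flow_runs` gives the task manager a retention job for old flow runs. A minimal usage sketch; the wrapper function is an assumption, while the parameters and their defaults are taken from the diff above:

from prefect.client.schemas.objects import StateType

from infrahub.task_manager.task import PrefectTask


async def prune_flow_run_history() -> None:
    # Remove finished runs older than a week, in batches of 100.
    # With delete=False the matching runs are force-marked CRASHED instead of being deleted.
    await PrefectTask.delete_flow_runs(
        states=[StateType.COMPLETED, StateType.FAILED, StateType.CANCELLED],
        delete=True,
        days_to_keep=7,
        batch_size=100,
    )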
infrahub/trigger/catalogue.py
CHANGED
@@ -4,6 +4,8 @@ from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
 )
+from infrahub.display_labels.triggers import TRIGGER_DISPLAY_LABELS_ALL_SCHEMA
+from infrahub.hfid.triggers import TRIGGER_HFID_ALL_SCHEMA
 from infrahub.schema.triggers import TRIGGER_SCHEMA_UPDATED
 from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
@@ -13,6 +15,8 @@ builtin_triggers: list[TriggerDefinition] = [
     TRIGGER_BRANCH_MERGED,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
+    TRIGGER_DISPLAY_LABELS_ALL_SCHEMA,
+    TRIGGER_HFID_ALL_SCHEMA,
     TRIGGER_SCHEMA_UPDATED,
     TRIGGER_WEBHOOK_DELETE,
     TRIGGER_WEBHOOK_SETUP_UPDATE,
infrahub/trigger/models.py
CHANGED
@@ -37,6 +37,8 @@ class TriggerType(str, Enum):
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
     COMPUTED_ATTR_PYTHON = "computed_attr_python"
     COMPUTED_ATTR_PYTHON_QUERY = "computed_attr_python_query"
+    DISPLAY_LABEL_JINJA2 = "display_label_jinja2"
+    HUMAN_FRIENDLY_ID = "human_friendly_id"
     # OBJECT = "object"

infrahub/trigger/setup.py
CHANGED
@@ -6,6 +6,7 @@ from prefect.cache_policies import NONE
 from prefect.client.orchestration import PrefectClient, get_client
 from prefect.client.schemas.filters import DeploymentFilter, DeploymentFilterName
 from prefect.events.schemas.automations import Automation
+from prefect.exceptions import PrefectHTTPStatusError

 from infrahub import lock
 from infrahub.database import InfrahubDatabase
@@ -51,7 +52,7 @@ async def setup_triggers_specific(
     )  # type: ignore[misc]


-@task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)
+@task(name="trigger-setup", task_run_name="Setup triggers", cache_policy=NONE)
 async def setup_triggers(
     client: PrefectClient,
     triggers: list[TriggerDefinition],
@@ -83,7 +84,9 @@ async def setup_triggers(
     existing_automations: dict[str, Automation] = {}
     if trigger_type:
         existing_automations = {
-            item.name: item
+            item.name: item
+            for item in await client.read_automations()
+            if item.name.startswith(f"{trigger_type.value}::")
         }
     else:
         existing_automations = {item.name: item for item in await client.read_automations()}
@@ -133,8 +136,14 @@ async def setup_triggers(
             continue

         report.deleted.append(existing_automation)
-
-
+        try:
+            await client.delete_automation(automation_id=existing_automation.id)
+            log.info(f"{item_to_delete} Deleted")
+        except PrefectHTTPStatusError as exc:
+            if exc.response.status_code == 404:
+                log.info(f"{item_to_delete} was already deleted")
+            else:
+                raise

     if trigger_type:
         log.info(
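The new error handling makes automation deletion idempotent: a 404 from the Prefect API is logged and ignored instead of failing the whole trigger setup. The same pattern in isolation, as a sketch (the helper name is an assumption):

from uuid import UUID

from prefect.client.orchestration import get_client
from prefect.exceptions import PrefectHTTPStatusError


async def delete_automation_if_present(automation_id: UUID) -> None:
    async with get_client(sync_client=False) as client:
        try:
            await client.delete_automation(automation_id=automation_id)
        except PrefectHTTPStatusError as exc:
            # Another worker may have removed it already; only re-raise real errors.
            if exc.response.status_code != 404:
                raise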
infrahub/trigger/tasks.py
CHANGED
@@ -6,6 +6,8 @@ from infrahub.computed_attribute.gather import (
     gather_trigger_computed_attribute_jinja2,
     gather_trigger_computed_attribute_python,
 )
+from infrahub.display_labels.gather import gather_trigger_display_labels_jinja2
+from infrahub.hfid.gather import gather_trigger_hfid
 from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.webhook.gather import gather_trigger_webhook
 from infrahub.workers.dependencies import get_database
@@ -18,6 +20,8 @@ async def trigger_configure_all() -> None:
     database = await get_database()
     async with database.start_session() as db:
         webhook_trigger = await gather_trigger_webhook(db=db)
+        display_label_triggers = await gather_trigger_display_labels_jinja2()
+        human_friendly_id_triggers = await gather_trigger_hfid()
         computed_attribute_j2_triggers = await gather_trigger_computed_attribute_jinja2()
         (
             computed_attribute_python_triggers,
@@ -28,6 +32,8 @@ async def trigger_configure_all() -> None:
             computed_attribute_j2_triggers
             + computed_attribute_python_triggers
             + computed_attribute_python_query_triggers
+            + display_label_triggers
+            + human_friendly_id_triggers
             + builtin_triggers
             + webhook_trigger
             + action_rules
infrahub/workers/dependencies.py
CHANGED
@@ -7,6 +7,7 @@ from infrahub_sdk.config import Config
 from infrahub import config
 from infrahub.components import ComponentType
 from infrahub.constants.environment import INSTALLATION_TYPE
+from infrahub.core.registry import registry
 from infrahub.database import InfrahubDatabase, get_db
 from infrahub.services.adapters.cache import InfrahubCache
 from infrahub.services.adapters.event import InfrahubEventService
@@ -34,7 +35,15 @@ def get_component_type() -> ComponentType:


 def build_client() -> InfrahubClient:
-
+    client_config = Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True)
+    client_config.set_ssl_context(context=get_http().verify_tls())
+    client = InfrahubClient(config=client_config)
+    # Populate client schema cache using our internal schema cache
+    if registry.schema:
+        for branch in registry.schema.get_branches():
+            client.schema.set_cache(schema=registry.schema.get_sdk_schema_branch(name=branch), branch=branch)
+
+    return client


 @inject
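`build_client` now wires a pre-built SSL context and the internal schema cache into the SDK client. The TLS part in isolation, as a minimal sketch where `ssl.create_default_context()` stands in for `get_http().verify_tls()`:

import ssl

from infrahub_sdk import Config, InfrahubClient


def build_sdk_client(address: str) -> InfrahubClient:
    client_config = Config(address=address, retry_on_failure=True)
    # Reuse one SSLContext instead of letting every client rebuild its own.
    client_config.set_ssl_context(context=ssl.create_default_context())
    return InfrahubClient(config=client_config)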
infrahub/workers/infrahub_async.py
CHANGED
@@ -8,6 +8,7 @@ from infrahub_sdk import Config, InfrahubClient
 from infrahub_sdk.exceptions import Error as SdkError
 from prefect import settings as prefect_settings
 from prefect.client.schemas.objects import FlowRun
+from prefect.context import AsyncClientContext
 from prefect.flow_engine import run_flow_async
 from prefect.logging.handlers import APILogHandler
 from prefect.workers.base import BaseJobConfiguration, BaseVariables, BaseWorker, BaseWorkerResult
@@ -18,6 +19,7 @@ from infrahub import config
 from infrahub.components import ComponentType
 from infrahub.core import registry
 from infrahub.core.initialization import initialization
+from infrahub.database.graph import validate_graph_version
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.git import initialize_repositories_directory
 from infrahub.lock import initialize_lock
@@ -27,6 +29,7 @@ from infrahub.workers.dependencies import (
     get_cache,
     get_component,
     get_database,
+    get_http,
     get_message_bus,
     get_workflow,
     set_component_type,
@@ -129,6 +132,9 @@ class InfrahubWorkerAsync(BaseWorker):

         await self.service.component.refresh_schema_hash()

+        async with self.service.database.start_session() as dbs:
+            await validate_graph_version(db=dbs)
+
         initialize_repositories_directory()
         build_component_registry()
         await self.service.scheduler.start_schedule()
@@ -138,7 +144,7 @@ class InfrahubWorkerAsync(BaseWorker):
         self,
         flow_run: FlowRun,
         configuration: BaseJobConfiguration,
-        task_status: TaskStatus | None = None,
+        task_status: TaskStatus[int] | None = None,
     ) -> BaseWorkerResult:
         flow_run_logger = self.get_flow_run_logger(flow_run)

@@ -154,7 +160,9 @@ class InfrahubWorkerAsync(BaseWorker):
         if task_status:
             task_status.started(True)

-
+        async with AsyncClientContext(httpx_settings={"verify": get_http().verify_tls()}) as ctx:
+            ctx._httpx_settings = None  # Hack to make all child task/flow runs use the same client
+            await run_flow_async(flow=flow_func, flow_run=flow_run, parameters=params, return_type="state")

         return InfrahubWorkerAsyncResult(status_code=0, identifier=str(flow_run.id))

infrahub/workflows/catalogue.py
CHANGED
@@ -230,6 +230,13 @@ BRANCH_REBASE = WorkflowDefinition(
     function="rebase_branch",
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
+BRANCH_MIGRATE = WorkflowDefinition(
+    name="branch-migrate",
+    type=WorkflowType.CORE,
+    module="infrahub.core.branch.tasks",
+    function="migrate_branch",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)

 BRANCH_CREATE = WorkflowDefinition(
     name="create-branch",
@@ -323,6 +330,62 @@ COMPUTED_ATTRIBUTE_JINJA2_UPDATE_VALUE = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )

+DISPLAY_LABELS_PROCESS_JINJA2 = WorkflowDefinition(
+    name="display-label-process-jinja2",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="process_display_label",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+DISPLAY_LABEL_JINJA2_UPDATE_VALUE = WorkflowDefinition(
+    name="display-label-jinja2-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="display_label_jinja2_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+HFID_PROCESS = WorkflowDefinition(
+    name="hfid-process",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="process_hfid",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+HFID_SETUP = WorkflowDefinition(
+    name="hfid-setup",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="hfid_setup",
+)
+
+
+HFID_UPDATE_VALUE = WorkflowDefinition(
+    name="hfid-update-value",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="hfid_update_value",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+TRIGGER_UPDATE_DISPLAY_LABELS = WorkflowDefinition(
+    name="trigger-update-display-labels",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="trigger_update_display_labels",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+TRIGGER_UPDATE_HFID = WorkflowDefinition(
+    name="trigger-update-hfid",
+    type=WorkflowType.CORE,
+    module="infrahub.hfid.tasks",
+    function="trigger_update_hfid",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES = WorkflowDefinition(
     name="trigger_update_jinja2_computed_attributes",
     type=WorkflowType.CORE,
@@ -359,6 +422,14 @@ COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )

+DISPLAY_LABELS_SETUP_JINJA2 = WorkflowDefinition(
+    name="display-labels-setup-jinja2",
+    type=WorkflowType.CORE,
+    module="infrahub.display_labels.tasks",
+    function="display_labels_setup_jinja2",
+)
+
+
 QUERY_COMPUTED_ATTRIBUTE_TRANSFORM_TARGETS = WorkflowDefinition(
     name="query-computed-attribute-transform-targets",
     type=WorkflowType.CORE,
@@ -577,6 +648,7 @@ WORKFLOWS = [
     BRANCH_MERGED,
     BRANCH_MERGE_MUTATION,
     BRANCH_MERGE_POST_PROCESS,
+    BRANCH_MIGRATE,
     BRANCH_REBASE,
     BRANCH_VALIDATE,
     CLEAN_UP_DEADLOCKS,
@@ -589,6 +661,9 @@ WORKFLOWS = [
     DIFF_REFRESH,
     DIFF_REFRESH_ALL,
     DIFF_UPDATE,
+    DISPLAY_LABELS_PROCESS_JINJA2,
+    DISPLAY_LABELS_SETUP_JINJA2,
+    DISPLAY_LABEL_JINJA2_UPDATE_VALUE,
     GIT_REPOSITORIES_CHECK_ARTIFACT_CREATE,
     GIT_REPOSITORIES_CREATE_BRANCH,
     GIT_REPOSITORIES_DIFF_NAMES_ONLY,
@@ -604,6 +679,9 @@ WORKFLOWS = [
     GIT_REPOSITORY_USER_CHECKS_TRIGGER,
     GIT_REPOSITORY_USER_CHECK_RUN,
     GRAPHQL_QUERY_GROUP_UPDATE,
+    HFID_PROCESS,
+    HFID_SETUP,
+    HFID_UPDATE_VALUE,
     IPAM_RECONCILIATION,
     PROFILE_REFRESH,
     PROFILE_REFRESH_MULTIPLE,
@@ -632,6 +710,8 @@ WORKFLOWS = [
     TRIGGER_ARTIFACT_DEFINITION_GENERATE,
     TRIGGER_CONFIGURE_ALL,
     TRIGGER_GENERATOR_DEFINITION_RUN,
+    TRIGGER_UPDATE_DISPLAY_LABELS,
+    TRIGGER_UPDATE_HFID,
     TRIGGER_UPDATE_JINJA_COMPUTED_ATTRIBUTES,
     TRIGGER_UPDATE_PYTHON_COMPUTED_ATTRIBUTES,
     VALIDATE_SCHEMA_NUMBER_POOLS,
infrahub/workflows/initialization.py
CHANGED
@@ -7,6 +7,8 @@ from prefect.exceptions import ObjectAlreadyExists
 from prefect.logging import get_run_logger

 from infrahub import config
+from infrahub.display_labels.gather import gather_trigger_display_labels_jinja2
+from infrahub.hfid.gather import gather_trigger_hfid
 from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.trigger.models import TriggerType
 from infrahub.trigger.setup import setup_triggers
@@ -74,3 +76,22 @@ async def setup_task_manager() -> None:
         await setup_triggers(
             client=client, triggers=builtin_triggers, trigger_type=TriggerType.BUILTIN, force_update=True
         )
+
+
+@flow(name="task-manager-identifiers", flow_run_name="Setup Task Manager Display Labels and HFID")
+async def setup_task_manager_identifiers() -> None:
+    async with get_client(sync_client=False) as client:
+        display_label_triggers = await gather_trigger_display_labels_jinja2()
+        await setup_triggers(
+            client=client,
+            triggers=display_label_triggers,
+            trigger_type=TriggerType.DISPLAY_LABEL_JINJA2,
+            force_update=True,
+        )  # type: ignore[misc]
+        hfid_triggers = await gather_trigger_hfid()
+        await setup_triggers(
+            client=client,
+            triggers=hfid_triggers,
+            trigger_type=TriggerType.HUMAN_FRIENDLY_ID,
+            force_update=True,
+        )  # type: ignore[misc]
infrahub/workflows/utils.py
CHANGED
@@ -9,6 +9,7 @@ from prefect.runtime import flow_run
 from infrahub.core.constants import GLOBAL_BRANCH_NAME
 from infrahub.core.registry import registry
 from infrahub.tasks.registry import refresh_branches
+from infrahub.workers.dependencies import get_http

 from .constants import TAG_NAMESPACE, WorkflowTag

@@ -26,7 +27,7 @@ async def add_tags(
     namespace: bool = True,
     db_change: bool = False,
 ) -> None:
-    client = get_client(sync_client=False)
+    client = get_client(httpx_settings={"verify": get_http().verify_tls()}, sync_client=False)
     current_flow_run_id = flow_run.id
     current_tags: list[str] = flow_run.tags
     branch_tags = (