infrahub-server 1.3.5__py3-none-any.whl → 1.4.0b0__py3-none-any.whl
This diff shows the contents of two publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- infrahub/api/internal.py +5 -0
- infrahub/artifacts/tasks.py +17 -22
- infrahub/branch/merge_mutation_checker.py +38 -0
- infrahub/cli/__init__.py +2 -2
- infrahub/cli/context.py +7 -3
- infrahub/cli/db.py +5 -16
- infrahub/cli/upgrade.py +7 -29
- infrahub/computed_attribute/tasks.py +36 -46
- infrahub/config.py +53 -2
- infrahub/constants/environment.py +1 -0
- infrahub/core/attribute.py +9 -7
- infrahub/core/branch/tasks.py +43 -41
- infrahub/core/constants/__init__.py +20 -6
- infrahub/core/constants/infrahubkind.py +2 -0
- infrahub/core/diff/coordinator.py +3 -1
- infrahub/core/diff/repository/repository.py +0 -8
- infrahub/core/diff/tasks.py +11 -8
- infrahub/core/graph/__init__.py +1 -1
- infrahub/core/graph/index.py +1 -2
- infrahub/core/graph/schema.py +50 -29
- infrahub/core/initialization.py +62 -33
- infrahub/core/ipam/tasks.py +4 -3
- infrahub/core/merge.py +8 -10
- infrahub/core/migrations/graph/__init__.py +2 -0
- infrahub/core/migrations/graph/m035_drop_attr_value_index.py +45 -0
- infrahub/core/migrations/query/attribute_add.py +27 -2
- infrahub/core/migrations/schema/tasks.py +6 -5
- infrahub/core/node/proposed_change.py +43 -0
- infrahub/core/protocols.py +12 -0
- infrahub/core/query/attribute.py +32 -14
- infrahub/core/query/diff.py +11 -0
- infrahub/core/query/ipam.py +13 -7
- infrahub/core/query/node.py +51 -10
- infrahub/core/query/resource_manager.py +3 -3
- infrahub/core/schema/basenode_schema.py +8 -0
- infrahub/core/schema/definitions/core/__init__.py +10 -1
- infrahub/core/schema/definitions/core/ipam.py +28 -2
- infrahub/core/schema/definitions/core/propose_change.py +15 -0
- infrahub/core/schema/definitions/core/webhook.py +3 -0
- infrahub/core/schema/generic_schema.py +10 -0
- infrahub/core/schema/manager.py +10 -1
- infrahub/core/schema/node_schema.py +22 -17
- infrahub/core/schema/profile_schema.py +8 -0
- infrahub/core/schema/schema_branch.py +9 -5
- infrahub/core/schema/template_schema.py +8 -0
- infrahub/core/validators/checks_runner.py +5 -5
- infrahub/core/validators/tasks.py +6 -7
- infrahub/core/validators/uniqueness/checker.py +4 -2
- infrahub/core/validators/uniqueness/model.py +1 -0
- infrahub/core/validators/uniqueness/query.py +57 -7
- infrahub/database/__init__.py +2 -1
- infrahub/events/__init__.py +18 -0
- infrahub/events/constants.py +7 -0
- infrahub/events/generator.py +29 -2
- infrahub/events/proposed_change_action.py +181 -0
- infrahub/generators/tasks.py +24 -20
- infrahub/git/base.py +4 -7
- infrahub/git/integrator.py +21 -12
- infrahub/git/repository.py +15 -30
- infrahub/git/tasks.py +121 -106
- infrahub/graphql/field_extractor.py +69 -0
- infrahub/graphql/manager.py +15 -11
- infrahub/graphql/mutations/account.py +2 -2
- infrahub/graphql/mutations/action.py +8 -2
- infrahub/graphql/mutations/artifact_definition.py +4 -1
- infrahub/graphql/mutations/branch.py +10 -5
- infrahub/graphql/mutations/graphql_query.py +2 -1
- infrahub/graphql/mutations/main.py +14 -8
- infrahub/graphql/mutations/menu.py +2 -1
- infrahub/graphql/mutations/proposed_change.py +225 -8
- infrahub/graphql/mutations/relationship.py +5 -0
- infrahub/graphql/mutations/repository.py +2 -1
- infrahub/graphql/mutations/tasks.py +7 -9
- infrahub/graphql/mutations/webhook.py +4 -1
- infrahub/graphql/parser.py +15 -6
- infrahub/graphql/queries/__init__.py +10 -1
- infrahub/graphql/queries/account.py +3 -3
- infrahub/graphql/queries/branch.py +2 -2
- infrahub/graphql/queries/diff/tree.py +3 -3
- infrahub/graphql/queries/event.py +13 -3
- infrahub/graphql/queries/ipam.py +23 -1
- infrahub/graphql/queries/proposed_change.py +84 -0
- infrahub/graphql/queries/relationship.py +2 -2
- infrahub/graphql/queries/resource_manager.py +3 -3
- infrahub/graphql/queries/search.py +3 -2
- infrahub/graphql/queries/status.py +3 -2
- infrahub/graphql/queries/task.py +2 -2
- infrahub/graphql/resolvers/ipam.py +440 -0
- infrahub/graphql/resolvers/many_relationship.py +4 -3
- infrahub/graphql/resolvers/resolver.py +5 -5
- infrahub/graphql/resolvers/single_relationship.py +3 -2
- infrahub/graphql/schema.py +25 -5
- infrahub/graphql/types/__init__.py +2 -2
- infrahub/graphql/types/attribute.py +3 -3
- infrahub/graphql/types/event.py +60 -0
- infrahub/groups/tasks.py +6 -6
- infrahub/lock.py +3 -2
- infrahub/menu/generator.py +8 -0
- infrahub/message_bus/operations/__init__.py +9 -12
- infrahub/message_bus/operations/git/file.py +6 -5
- infrahub/message_bus/operations/git/repository.py +12 -20
- infrahub/message_bus/operations/refresh/registry.py +15 -9
- infrahub/message_bus/operations/send/echo.py +7 -4
- infrahub/message_bus/types.py +1 -0
- infrahub/permissions/globals.py +1 -4
- infrahub/permissions/manager.py +8 -5
- infrahub/pools/prefix.py +7 -5
- infrahub/prefect_server/app.py +31 -0
- infrahub/prefect_server/bootstrap.py +18 -0
- infrahub/proposed_change/action_checker.py +206 -0
- infrahub/proposed_change/approval_revoker.py +40 -0
- infrahub/proposed_change/branch_diff.py +3 -1
- infrahub/proposed_change/checker.py +45 -0
- infrahub/proposed_change/constants.py +32 -2
- infrahub/proposed_change/tasks.py +182 -150
- infrahub/py.typed +0 -0
- infrahub/server.py +29 -17
- infrahub/services/__init__.py +13 -28
- infrahub/services/adapters/cache/__init__.py +4 -0
- infrahub/services/adapters/cache/nats.py +2 -0
- infrahub/services/adapters/cache/redis.py +3 -0
- infrahub/services/adapters/message_bus/__init__.py +0 -2
- infrahub/services/adapters/message_bus/local.py +1 -2
- infrahub/services/adapters/message_bus/nats.py +6 -8
- infrahub/services/adapters/message_bus/rabbitmq.py +7 -9
- infrahub/services/adapters/workflow/__init__.py +1 -0
- infrahub/services/adapters/workflow/local.py +1 -8
- infrahub/services/component.py +2 -1
- infrahub/task_manager/event.py +52 -0
- infrahub/task_manager/models.py +9 -0
- infrahub/tasks/artifact.py +6 -7
- infrahub/tasks/check.py +4 -7
- infrahub/telemetry/tasks.py +15 -18
- infrahub/transformations/tasks.py +10 -6
- infrahub/trigger/tasks.py +4 -3
- infrahub/types.py +4 -0
- infrahub/validators/events.py +7 -7
- infrahub/validators/tasks.py +6 -7
- infrahub/webhook/models.py +45 -45
- infrahub/webhook/tasks.py +25 -24
- infrahub/workers/dependencies.py +143 -0
- infrahub/workers/infrahub_async.py +19 -43
- infrahub/workflows/catalogue.py +16 -2
- infrahub/workflows/initialization.py +5 -4
- infrahub/workflows/models.py +2 -0
- infrahub_sdk/client.py +6 -6
- infrahub_sdk/ctl/repository.py +51 -0
- infrahub_sdk/ctl/schema.py +9 -9
- infrahub_sdk/protocols.py +40 -6
- {infrahub_server-1.3.5.dist-info → infrahub_server-1.4.0b0.dist-info}/METADATA +5 -4
- {infrahub_server-1.3.5.dist-info → infrahub_server-1.4.0b0.dist-info}/RECORD +158 -144
- infrahub_testcontainers/container.py +17 -0
- infrahub_testcontainers/docker-compose-cluster.test.yml +56 -1
- infrahub_testcontainers/docker-compose.test.yml +56 -1
- infrahub_testcontainers/helpers.py +4 -1
- {infrahub_server-1.3.5.dist-info → infrahub_server-1.4.0b0.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.3.5.dist-info → infrahub_server-1.4.0b0.dist-info}/WHEEL +0 -0
- {infrahub_server-1.3.5.dist-info → infrahub_server-1.4.0b0.dist-info}/entry_points.txt +0 -0
infrahub/workers/dependencies.py
ADDED

@@ -0,0 +1,143 @@
+from typing import Any
+
+from fast_depends import Depends, inject
+from infrahub_sdk.client import InfrahubClient
+from infrahub_sdk.config import Config
+
+from infrahub import config
+from infrahub.components import ComponentType
+from infrahub.constants.environment import INSTALLATION_TYPE
+from infrahub.database import InfrahubDatabase, get_db
+from infrahub.services.adapters.cache import InfrahubCache
+from infrahub.services.adapters.event import InfrahubEventService
+from infrahub.services.adapters.http import InfrahubHTTP
+from infrahub.services.adapters.http.httpx import HttpxAdapter
+from infrahub.services.adapters.message_bus import InfrahubMessageBus
+from infrahub.services.adapters.workflow import InfrahubWorkflow
+from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
+from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
+from infrahub.services.component import InfrahubComponent
+
+_singletons: dict[str, Any] = {}
+
+
+def set_component_type(component_type: ComponentType) -> None:
+    if "component_type" not in _singletons:
+        _singletons["component_type"] = component_type
+
+
+def get_component_type() -> ComponentType:
+    try:
+        return _singletons["component_type"]
+    except KeyError as exc:
+        raise ValueError("Component type is not set. It needs to be initialized before working with services.") from exc
+
+
+def build_client() -> InfrahubClient:
+    return InfrahubClient(config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True))
+
+
+@inject
+def get_client(client: InfrahubClient = Depends(build_client)) -> InfrahubClient:  # noqa: B008
+    return client
+
+
+def build_installation_type() -> str:
+    return INSTALLATION_TYPE
+
+
+@inject
+def get_installation_type(installation_type: str = Depends(build_installation_type)) -> str:
+    return installation_type
+
+
+async def build_database() -> InfrahubDatabase:
+    if "database" not in _singletons:
+        _singletons["database"] = InfrahubDatabase(driver=await get_db(retry=5))
+    return _singletons["database"]
+
+
+@inject
+async def get_database(database: InfrahubDatabase = Depends(build_database)) -> InfrahubDatabase:  # noqa: B008
+    return database
+
+
+async def build_cache() -> InfrahubCache:
+    if "cache" not in _singletons:
+        _singletons["cache"] = config.OVERRIDE.cache or await InfrahubCache.new_from_driver(
+            driver=config.SETTINGS.cache.driver
+        )
+    return _singletons["cache"]
+
+
+@inject
+async def get_cache(cache: InfrahubCache = Depends(build_cache)) -> InfrahubCache:  # noqa: B008
+    return cache
+
+
+async def build_message_bus() -> InfrahubMessageBus:
+    if "message_bus" not in _singletons:
+        _singletons["message_bus"] = config.OVERRIDE.message_bus or (
+            await InfrahubMessageBus.new_from_driver(
+                component_type=get_component_type(), driver=config.SETTINGS.broker.driver
+            )
+        )
+    return _singletons["message_bus"]
+
+
+@inject
+async def get_message_bus(message_bus: InfrahubMessageBus = Depends(build_message_bus)) -> InfrahubMessageBus:  # noqa: B008
+    return message_bus
+
+
+async def build_event_service() -> InfrahubEventService:
+    if "event_service" not in _singletons:
+        _singletons["event_service"] = InfrahubEventService(message_bus=await get_message_bus())
+    return _singletons["event_service"]
+
+
+@inject
+async def get_event_service(event_service: InfrahubEventService = Depends(build_event_service)) -> InfrahubEventService:  # noqa: B008
+    return event_service
+
+
+def build_workflow() -> InfrahubWorkflow:
+    if "workflow" not in _singletons:
+        _singletons["workflow"] = config.OVERRIDE.workflow or (
+            WorkflowWorkerExecution()
+            if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER
+            else WorkflowLocalExecution()
+        )
+    return _singletons["workflow"]
+
+
+@inject
+def get_workflow(workflow: InfrahubWorkflow = Depends(build_workflow)) -> InfrahubWorkflow:  # noqa: B008
+    return workflow
+
+
+def build_http_service() -> InfrahubHTTP:
+    if "http_service" not in _singletons:
+        _singletons["http_service"] = HttpxAdapter()
+    return _singletons["http_service"]
+
+
+@inject
+def get_http(http_service: InfrahubHTTP = Depends(build_http_service)) -> InfrahubHTTP:  # noqa: B008
+    return http_service
+
+
+async def build_component() -> InfrahubComponent:
+    if "component" not in _singletons:
+        _singletons["component"] = await InfrahubComponent.new(
+            cache=await get_cache(),
+            component_type=get_component_type(),
+            db=await get_database(),
+            message_bus=await get_message_bus(),
+        )
+    return _singletons["component"]
+
+
+@inject
+async def get_component(component: InfrahubComponent = Depends(build_component)) -> InfrahubComponent:  # noqa: B008
+    return component
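The new workers/dependencies.py module centralizes service construction behind fast_depends accessors backed by a module-level singleton cache. A rough sketch of how these accessors could be consumed outside the worker (the bootstrap function below is illustrative and not part of the release):

from infrahub.components import ComponentType
from infrahub.workers.dependencies import get_cache, get_database, set_component_type


async def bootstrap_example() -> None:
    # The component type must be registered once before the message-bus or
    # component accessors are used.
    set_component_type(component_type=ComponentType.GIT_AGENT)

    database = await get_database()  # built on first call, then reused from _singletons
    cache = await get_cache()        # honours config.OVERRIDE.cache when set
    assert database is await get_database()  # subsequent calls return the same singleton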
infrahub/workers/infrahub_async.py
CHANGED

@@ -18,17 +18,19 @@ from infrahub import config
 from infrahub.components import ComponentType
 from infrahub.core import registry
 from infrahub.core.initialization import initialization
-from infrahub.database import InfrahubDatabase, get_db
 from infrahub.dependencies.registry import build_component_registry
 from infrahub.git import initialize_repositories_directory
 from infrahub.lock import initialize_lock
 from infrahub.services import InfrahubServices
-from infrahub.services.adapters.cache import InfrahubCache
-from infrahub.services.adapters.message_bus import InfrahubMessageBus
-from infrahub.services.adapters.workflow import InfrahubWorkflow
-from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
-from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
 from infrahub.trace import configure_trace
+from infrahub.workers.dependencies import (
+    get_cache,
+    get_component,
+    get_database,
+    get_message_bus,
+    get_workflow,
+    set_component_type,
+)
 from infrahub.workers.utils import inject_service_parameter, load_flow_function
 from infrahub.workflows.models import TASK_RESULT_STORAGE_NAME

@@ -63,6 +65,7 @@ class InfrahubWorkerAsync(BaseWorker):
     _logo_url = "https://example.com/logo"
     _description = "Infrahub worker designed to run the flow in the main async loop."
     service: InfrahubServices  # keep a reference to `service` so we can inject it within flows parameters.
+    component_type = ComponentType.GIT_AGENT

     async def setup(
         self,
@@ -115,6 +118,7 @@ class InfrahubWorkerAsync(BaseWorker):
            )
        )

+        set_component_type(component_type=self.component_type)
         await self._init_services(client=client)

         if not registry.schema_has_been_initialized():
@@ -141,22 +145,18 @@ class InfrahubWorkerAsync(BaseWorker):
         entrypoint: str = configuration._related_objects["deployment"].entrypoint

         file_path, flow_name = entrypoint.split(":")
-        file_path.replace("/", ".")
-        module_path = file_path.replace("backend/", "").replace(".py", "").replace("/", ".")
+        module_path = file_path.removeprefix("backend/").removesuffix(".py").replace("/", ".")
         flow_func = load_flow_function(module_path=module_path, flow_name=flow_name)
         inject_service_parameter(func=flow_func, parameters=flow_run.parameters, service=self.service)
         flow_run_logger.debug("Validating parameters")
         params = flow_func.validate_parameters(parameters=flow_run.parameters)

         if task_status:
-            task_status.started()
+            task_status.started(True)

         await run_flow_async(flow=flow_func, flow_run=flow_run, parameters=params, return_type="state")

-        return InfrahubWorkerAsyncResult(
-            status_code=0,
-            identifier=str(flow_run.id),
-        )
+        return InfrahubWorkerAsyncResult(status_code=0, identifier=str(flow_run.id))

     def _init_logger(self) -> None:
         """Initialize loggers to use the API handle provided by Prefect."""
@@ -182,41 +182,17 @@ class InfrahubWorkerAsync(BaseWorker):

         return client

-    async def _init_database(self) -> InfrahubDatabase:
-        return InfrahubDatabase(driver=await get_db(retry=1))
-
-    async def _init_workflow(self) -> InfrahubWorkflow:
-        return config.OVERRIDE.workflow or (
-            WorkflowWorkerExecution()
-            if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER
-            else WorkflowLocalExecution()
-        )
-
-    async def _init_message_bus(self, component_type: ComponentType) -> InfrahubMessageBus:
-        return config.OVERRIDE.message_bus or (
-            await InfrahubMessageBus.new_from_driver(
-                component_type=component_type, driver=config.SETTINGS.broker.driver
-            )
-        )
-
-    async def _init_cache(self) -> InfrahubCache:
-        return config.OVERRIDE.cache or (await InfrahubCache.new_from_driver(driver=config.SETTINGS.cache.driver))
-
     async def _init_services(self, client: InfrahubClient) -> None:
-        component_type = ComponentType.GIT_AGENT
         client = await self._init_infrahub_client(client=client)
-        database = await self._init_database()
-        workflow = await self._init_workflow()
-        message_bus = await self._init_message_bus(component_type=component_type)
-        cache = await self._init_cache()

         service = await InfrahubServices.new(
-            cache=
+            cache=await get_cache(),
             client=client,
-            database=
-            message_bus=
-            workflow=
-
+            database=await get_database(),
+            message_bus=await get_message_bus(),
+            workflow=get_workflow(),
+            component=await get_component(),
+            component_type=self.component_type,
         )

         self.service = service
infrahub/workflows/catalogue.py
CHANGED
@@ -1,5 +1,7 @@
 import random

+from fast_depends import Depends, inject
+
 from .constants import WorkflowTag, WorkflowType
 from .models import WorkerPoolDefinition, WorkflowDefinition

@@ -529,9 +531,9 @@ VALIDATE_SCHEMA_NUMBER_POOLS = WorkflowDefinition(
 )


-
+WORKER_POOLS = [INFRAHUB_WORKER_POOL]

-
+WORKFLOWS = [
     ACTION_ADD_NODE_TO_GROUP,
     ACTION_RUN_GENERATOR,
     ACTION_RUN_GENERATOR_GROUP_EVENT,
@@ -603,3 +605,15 @@
     WEBHOOK_DELETE_AUTOMATION,
     WEBHOOK_PROCESS,
 ]
+
+
+# Use this dependency injection mechanism to easily add new workflows within infrahub-enterprise
+def build_workflows_definitions() -> list[WorkflowDefinition]:
+    return WORKFLOWS
+
+
+@inject
+def get_workflows(
+    workflows: list[WorkflowDefinition] = Depends(build_workflows_definitions),  # noqa: B008
+) -> list[WorkflowDefinition]:
+    return workflows
infrahub/workflows/initialization.py
CHANGED

@@ -11,19 +11,20 @@ from infrahub.trigger.catalogue import builtin_triggers
 from infrahub.trigger.models import TriggerType
 from infrahub.trigger.setup import setup_triggers

-from .catalogue import
+from .catalogue import WORKER_POOLS, get_workflows
 from .models import TASK_RESULT_STORAGE_NAME


 @task(name="task-manager-setup-worker-pools", task_run_name="Setup Worker pools", cache_policy=NONE)  # type: ignore[arg-type]
 async def setup_worker_pools(client: PrefectClient) -> None:
     log = get_run_logger()
-    for worker in
+    for worker in WORKER_POOLS:
         wp = WorkPoolCreate(
             name=worker.name,
             type=worker.worker_type or config.SETTINGS.workflow.default_worker_type,
             description=worker.description,
         )
+
         try:
             await client.create_work_pool(work_pool=wp, overwrite=True)
             log.info(f"Work pool {worker.name} created successfully ... ")
@@ -34,10 +35,10 @@ async def setup_worker_pools(client: PrefectClient) -> None:
 @task(name="task-manager-setup-deployments", task_run_name="Setup Deployments", cache_policy=NONE)  # type: ignore[arg-type]
 async def setup_deployments(client: PrefectClient) -> None:
     log = get_run_logger()
-    for workflow in
+    for workflow in get_workflows():
         # For now the workpool is hardcoded but
         # later we need to make it dynamic to have a different worker based on the type of the workflow
-        work_pool =
+        work_pool = WORKER_POOLS[0]
         await workflow.save(client=client, work_pool=work_pool)
         log.info(f"Flow {workflow.name}, created successfully ... ")
infrahub/workflows/models.py
CHANGED
infrahub_sdk/client.py
CHANGED
@@ -250,7 +250,7 @@ class BaseClient:

         return Mutation(
             name="AllocateIPAddress",
-            mutation="
+            mutation="InfrahubIPAddressPoolGetResource",
             query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}},
             input_data={"data": input_data},
         )
@@ -281,7 +281,7 @@ class BaseClient:

         return Mutation(
             name="AllocateIPPrefix",
-            mutation="
+            mutation="InfrahubIPPrefixPoolGetResource",
             query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}},
             input_data={"data": input_data},
         )
@@ -1300,7 +1300,7 @@ class InfrahubClient(BaseClient):
             raise ValueError("resource_pool is not an IP address pool")

         branch = branch or self.default_branch
-        mutation_name = "
+        mutation_name = "InfrahubIPAddressPoolGetResource"

         query = self._build_ip_address_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -1452,7 +1452,7 @@ class InfrahubClient(BaseClient):
             raise ValueError("resource_pool is not an IP prefix pool")

         branch = branch or self.default_branch
-        mutation_name = "
+        mutation_name = "InfrahubIPPrefixPoolGetResource"

         query = self._build_ip_prefix_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -2438,7 +2438,7 @@ class InfrahubClientSync(BaseClient):
             raise ValueError("resource_pool is not an IP address pool")

         branch = branch or self.default_branch
-        mutation_name = "
+        mutation_name = "InfrahubIPAddressPoolGetResource"

         query = self._build_ip_address_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -2586,7 +2586,7 @@ class InfrahubClientSync(BaseClient):
             raise ValueError("resource_pool is not an IP prefix pool")

         branch = branch or self.default_branch
-        mutation_name = "
+        mutation_name = "InfrahubIPPrefixPoolGetResource"

         query = self._build_ip_prefix_allocation_query(
             resource_pool_id=resource_pool.id,
infrahub_sdk/ctl/repository.py
CHANGED
@@ -1,10 +1,12 @@
 from __future__ import annotations

+import asyncio
 from pathlib import Path
 from typing import Optional

 import typer
 import yaml
+from copier import run_copy
 from pydantic import ValidationError
 from rich.console import Console
 from rich.table import Table
@@ -165,3 +167,52 @@ async def list(
     )

     console.print(table)
+
+
+@app.command()
+async def init(
+    directory: Path = typer.Argument(help="Directory path for the new project."),
+    template: str = typer.Option(
+        default="https://github.com/opsmill/infrahub-template.git",
+        help="Template to use for the new repository. Can be a local path or a git repository URL.",
+    ),
+    data: Optional[Path] = typer.Option(default=None, help="Path to YAML file containing answers to CLI prompt."),
+    vcs_ref: Optional[str] = typer.Option(
+        default="HEAD",
+        help="VCS reference to use for the template. Defaults to HEAD.",
+    ),
+    trust: Optional[bool] = typer.Option(
+        default=False,
+        help="Trust the template repository. If set, the template will be cloned without verification.",
+    ),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Initialize a new Infrahub repository."""
+
+    config_data = None
+    if data:
+        try:
+            with Path.open(data, encoding="utf-8") as file:
+                config_data = yaml.safe_load(file)
+            typer.echo(f"Loaded config: {config_data}")
+        except Exception as exc:
+            typer.echo(f"Error loading YAML file: {exc}", err=True)
+            raise typer.Exit(code=1)
+
+    # Allow template to be a local path or a URL
+    template_source = template or ""
+    if template and Path(template).exists():
+        template_source = str(Path(template).resolve())
+
+    try:
+        await asyncio.to_thread(
+            run_copy,
+            template_source,
+            str(directory),
+            data=config_data,
+            vcs_ref=vcs_ref,
+            unsafe=trust,
+        )
+    except Exception as e:
+        typer.echo(f"Error running copier: {e}", err=True)
+        raise typer.Exit(code=1)
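The new `infrahubctl repository init` command is a thin wrapper around copier's run_copy, as the hunk above shows. A rough equivalent for anyone scripting project generation directly (the target directory and answers are examples only):

from copier import run_copy

run_copy(
    "https://github.com/opsmill/infrahub-template.git",  # default template used by the CLI
    "./my-infrahub-repo",
    data=None,       # or a dict of answers, as the CLI loads from the --data YAML file
    vcs_ref="HEAD",
    unsafe=False,    # exposed by the CLI as --trust
)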
infrahub_sdk/ctl/schema.py
CHANGED
@@ -36,7 +36,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[Schem
     has_error: bool = False
     for schema_file in schemas:
         try:
-            client.schema.validate(data=schema_file.
+            client.schema.validate(data=schema_file.payload)
         except ValidationError as exc:
             console.print(f"[red]Schema not valid, found '{len(exc.errors())}' error(s) in {schema_file.location}")
             has_error = True
@@ -48,7 +48,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[Schem
         raise typer.Exit(1)


-def display_schema_load_errors(response: dict[str, Any], schemas_data: list[
+def display_schema_load_errors(response: dict[str, Any], schemas_data: list[SchemaFile]) -> None:
     console.print("[red]Unable to load the schema:")
     if "detail" not in response:
         handle_non_detail_errors(response=response)
@@ -87,7 +87,7 @@ def handle_non_detail_errors(response: dict[str, Any]) -> None:
     if "error" in response:
         console.print(f" {response.get('error')}")
     elif "errors" in response:
-        for error in response
+        for error in response["errors"]:
            console.print(f" {error.get('message')}")
     else:
         console.print(f" '{response}'")
@@ -97,9 +97,9 @@ def valid_error_path(loc_path: list[Any]) -> bool:
     return len(loc_path) >= 6 and loc_path[0] == "body" and loc_path[1] == "schemas"


-def get_node(schemas_data: list[
-    if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].
-        return schemas_data[schema_index].
+def get_node(schemas_data: list[SchemaFile], schema_index: int, node_index: int) -> dict | None:
+    if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].payload["nodes"]):
+        return schemas_data[schema_index].payload["nodes"][node_index]
     return None


@@ -122,7 +122,7 @@ async def load(
     validate_schema_content_and_exit(client=client, schemas=schemas_data)

     start_time = time.time()
-    response = await client.schema.load(schemas=[item.
+    response = await client.schema.load(schemas=[item.payload for item in schemas_data], branch=branch)
     loading_time = time.time() - start_time

     if response.errors:
@@ -170,10 +170,10 @@ async def check(
     client = initialize_client()
     validate_schema_content_and_exit(client=client, schemas=schemas_data)

-    success, response = await client.schema.check(schemas=[item.
+    success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)

     if not success:
-        display_schema_load_errors(response=response, schemas_data=schemas_data)
+        display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
     else:
         for schema_file in schemas_data:
             console.print(f"[green] schema '{schema_file.location}' is Valid!")
infrahub_sdk/protocols.py
CHANGED
@@ -233,6 +233,10 @@ class CoreWebhook(CoreNode):
     validate_certificates: BooleanOptional


+class CoreWeightedPoolResource(CoreNode):
+    allocation_weight: IntegerOptional
+
+
 class LineageOwner(CoreNode):
     pass

@@ -321,6 +325,7 @@ class CoreCheckDefinition(CoreTaskTarget):


 class CoreCustomWebhook(CoreWebhook, CoreTaskTarget):
+    shared_key: StringOptional
     transformation: RelatedNode


@@ -405,12 +410,12 @@ class CoreGraphQLQueryGroup(CoreGroup):


 class CoreGroupAction(CoreAction):
-
+    member_action: Dropdown
     group: RelatedNode


 class CoreGroupTriggerRule(CoreTriggerRule):
-
+    member_update: Dropdown
     group: RelatedNode


@@ -442,7 +447,7 @@ class CoreNodeTriggerAttributeMatch(CoreNodeTriggerMatch):

 class CoreNodeTriggerRelationshipMatch(CoreNodeTriggerMatch):
     relationship_name: String
-
+    modification_type: Dropdown
     peer: StringOptional


@@ -457,6 +462,7 @@ class CoreNumberPool(CoreResourcePool, LineageSource):
     node_attribute: String
     start_range: Integer
     end_range: Integer
+    pool_type: Enum


 class CoreObjectPermission(CoreBasePermission):
@@ -481,7 +487,10 @@ class CoreProposedChange(CoreTaskTarget):
     source_branch: String
     destination_branch: String
     state: Enum
+    is_draft: Boolean
+    total_comments: IntegerOptional
     approved_by: RelationshipManager
+    rejected_by: RelationshipManager
     reviewers: RelationshipManager
     created_by: RelatedNode
     comments: RelationshipManager
@@ -555,6 +564,14 @@ class InternalAccountToken(CoreNode):
     account: RelatedNode


+class InternalIPPrefixAvailable(BuiltinIPPrefix):
+    pass
+
+
+class InternalIPRangeAvailable(BuiltinIPAddress):
+    last_address: IPHost
+
+
 class InternalRefreshToken(CoreNode):
     expiration: DateTime
     account: RelatedNode
@@ -766,6 +783,10 @@ class CoreWebhookSync(CoreNodeSync):
     validate_certificates: BooleanOptional


+class CoreWeightedPoolResourceSync(CoreNodeSync):
+    allocation_weight: IntegerOptional
+
+
 class LineageOwnerSync(CoreNodeSync):
     pass

@@ -854,6 +875,7 @@ class CoreCheckDefinitionSync(CoreTaskTargetSync):


 class CoreCustomWebhookSync(CoreWebhookSync, CoreTaskTargetSync):
+    shared_key: StringOptional
     transformation: RelatedNodeSync


@@ -938,12 +960,12 @@ class CoreGraphQLQueryGroupSync(CoreGroupSync):


 class CoreGroupActionSync(CoreActionSync):
-
+    member_action: Dropdown
     group: RelatedNodeSync


 class CoreGroupTriggerRuleSync(CoreTriggerRuleSync):
-
+    member_update: Dropdown
     group: RelatedNodeSync


@@ -975,7 +997,7 @@ class CoreNodeTriggerAttributeMatchSync(CoreNodeTriggerMatchSync):

 class CoreNodeTriggerRelationshipMatchSync(CoreNodeTriggerMatchSync):
     relationship_name: String
-
+    modification_type: Dropdown
     peer: StringOptional


@@ -990,6 +1012,7 @@ class CoreNumberPoolSync(CoreResourcePoolSync, LineageSourceSync):
     node_attribute: String
     start_range: Integer
     end_range: Integer
+    pool_type: Enum


 class CoreObjectPermissionSync(CoreBasePermissionSync):
@@ -1014,7 +1037,10 @@ class CoreProposedChangeSync(CoreTaskTargetSync):
     source_branch: String
     destination_branch: String
     state: Enum
+    is_draft: Boolean
+    total_comments: IntegerOptional
     approved_by: RelationshipManagerSync
+    rejected_by: RelationshipManagerSync
     reviewers: RelationshipManagerSync
     created_by: RelatedNodeSync
     comments: RelationshipManagerSync
@@ -1088,6 +1114,14 @@ class InternalAccountTokenSync(CoreNodeSync):
     account: RelatedNodeSync


+class InternalIPPrefixAvailableSync(BuiltinIPPrefixSync):
+    pass
+
+
+class InternalIPRangeAvailableSync(BuiltinIPAddressSync):
+    last_address: IPHost
+
+
 class InternalRefreshTokenSync(CoreNodeSync):
     expiration: DateTime
     account: RelatedNodeSync