infrahub-server 1.3.7__py3-none-any.whl → 1.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (174)
  1. infrahub/api/internal.py +5 -0
  2. infrahub/artifacts/tasks.py +17 -22
  3. infrahub/branch/merge_mutation_checker.py +38 -0
  4. infrahub/cli/__init__.py +2 -2
  5. infrahub/cli/context.py +7 -3
  6. infrahub/cli/db.py +5 -16
  7. infrahub/cli/upgrade.py +10 -29
  8. infrahub/computed_attribute/tasks.py +36 -46
  9. infrahub/config.py +57 -6
  10. infrahub/constants/environment.py +1 -0
  11. infrahub/core/attribute.py +15 -7
  12. infrahub/core/branch/tasks.py +43 -41
  13. infrahub/core/constants/__init__.py +21 -6
  14. infrahub/core/constants/infrahubkind.py +2 -0
  15. infrahub/core/diff/coordinator.py +3 -1
  16. infrahub/core/diff/model/path.py +0 -39
  17. infrahub/core/diff/repository/repository.py +0 -8
  18. infrahub/core/diff/tasks.py +11 -8
  19. infrahub/core/graph/__init__.py +1 -1
  20. infrahub/core/graph/index.py +1 -2
  21. infrahub/core/graph/schema.py +50 -29
  22. infrahub/core/initialization.py +81 -47
  23. infrahub/core/ipam/tasks.py +4 -3
  24. infrahub/core/merge.py +8 -10
  25. infrahub/core/migrations/__init__.py +2 -0
  26. infrahub/core/migrations/graph/__init__.py +4 -0
  27. infrahub/core/migrations/graph/m036_drop_attr_value_index.py +45 -0
  28. infrahub/core/migrations/graph/m037_index_attr_vals.py +577 -0
  29. infrahub/core/migrations/query/attribute_add.py +27 -2
  30. infrahub/core/migrations/schema/attribute_kind_update.py +156 -0
  31. infrahub/core/migrations/schema/tasks.py +6 -5
  32. infrahub/core/models.py +5 -1
  33. infrahub/core/node/proposed_change.py +43 -0
  34. infrahub/core/protocols.py +12 -0
  35. infrahub/core/query/attribute.py +32 -14
  36. infrahub/core/query/diff.py +11 -0
  37. infrahub/core/query/ipam.py +13 -7
  38. infrahub/core/query/node.py +51 -10
  39. infrahub/core/query/resource_manager.py +3 -3
  40. infrahub/core/schema/basenode_schema.py +8 -0
  41. infrahub/core/schema/definitions/core/__init__.py +10 -1
  42. infrahub/core/schema/definitions/core/ipam.py +28 -2
  43. infrahub/core/schema/definitions/core/propose_change.py +15 -0
  44. infrahub/core/schema/definitions/core/webhook.py +3 -0
  45. infrahub/core/schema/definitions/internal.py +1 -1
  46. infrahub/core/schema/generated/attribute_schema.py +1 -1
  47. infrahub/core/schema/generic_schema.py +10 -0
  48. infrahub/core/schema/manager.py +10 -1
  49. infrahub/core/schema/node_schema.py +22 -22
  50. infrahub/core/schema/profile_schema.py +8 -0
  51. infrahub/core/schema/schema_branch.py +11 -7
  52. infrahub/core/schema/template_schema.py +8 -0
  53. infrahub/core/validators/attribute/kind.py +5 -1
  54. infrahub/core/validators/checks_runner.py +5 -5
  55. infrahub/core/validators/determiner.py +22 -2
  56. infrahub/core/validators/tasks.py +6 -7
  57. infrahub/core/validators/uniqueness/checker.py +4 -2
  58. infrahub/core/validators/uniqueness/model.py +1 -0
  59. infrahub/core/validators/uniqueness/query.py +57 -7
  60. infrahub/database/__init__.py +2 -1
  61. infrahub/events/__init__.py +20 -0
  62. infrahub/events/constants.py +7 -0
  63. infrahub/events/generator.py +29 -2
  64. infrahub/events/proposed_change_action.py +203 -0
  65. infrahub/generators/tasks.py +24 -20
  66. infrahub/git/base.py +4 -7
  67. infrahub/git/integrator.py +21 -12
  68. infrahub/git/repository.py +15 -30
  69. infrahub/git/tasks.py +121 -106
  70. infrahub/graphql/app.py +2 -1
  71. infrahub/graphql/field_extractor.py +69 -0
  72. infrahub/graphql/manager.py +15 -11
  73. infrahub/graphql/mutations/account.py +2 -2
  74. infrahub/graphql/mutations/action.py +8 -2
  75. infrahub/graphql/mutations/artifact_definition.py +4 -1
  76. infrahub/graphql/mutations/branch.py +10 -5
  77. infrahub/graphql/mutations/graphql_query.py +2 -1
  78. infrahub/graphql/mutations/main.py +14 -8
  79. infrahub/graphql/mutations/menu.py +2 -1
  80. infrahub/graphql/mutations/proposed_change.py +230 -8
  81. infrahub/graphql/mutations/relationship.py +5 -0
  82. infrahub/graphql/mutations/repository.py +2 -1
  83. infrahub/graphql/mutations/tasks.py +7 -9
  84. infrahub/graphql/mutations/webhook.py +4 -1
  85. infrahub/graphql/parser.py +15 -6
  86. infrahub/graphql/queries/__init__.py +10 -1
  87. infrahub/graphql/queries/account.py +3 -3
  88. infrahub/graphql/queries/branch.py +2 -2
  89. infrahub/graphql/queries/diff/tree.py +56 -5
  90. infrahub/graphql/queries/event.py +13 -3
  91. infrahub/graphql/queries/ipam.py +23 -1
  92. infrahub/graphql/queries/proposed_change.py +84 -0
  93. infrahub/graphql/queries/relationship.py +2 -2
  94. infrahub/graphql/queries/resource_manager.py +3 -3
  95. infrahub/graphql/queries/search.py +3 -2
  96. infrahub/graphql/queries/status.py +3 -2
  97. infrahub/graphql/queries/task.py +2 -2
  98. infrahub/graphql/resolvers/ipam.py +440 -0
  99. infrahub/graphql/resolvers/many_relationship.py +4 -3
  100. infrahub/graphql/resolvers/resolver.py +5 -5
  101. infrahub/graphql/resolvers/single_relationship.py +3 -2
  102. infrahub/graphql/schema.py +25 -5
  103. infrahub/graphql/types/__init__.py +2 -2
  104. infrahub/graphql/types/attribute.py +3 -3
  105. infrahub/graphql/types/event.py +68 -0
  106. infrahub/groups/tasks.py +6 -6
  107. infrahub/lock.py +3 -2
  108. infrahub/menu/generator.py +8 -0
  109. infrahub/message_bus/operations/__init__.py +9 -12
  110. infrahub/message_bus/operations/git/file.py +6 -5
  111. infrahub/message_bus/operations/git/repository.py +12 -20
  112. infrahub/message_bus/operations/refresh/registry.py +15 -9
  113. infrahub/message_bus/operations/send/echo.py +7 -4
  114. infrahub/message_bus/types.py +1 -0
  115. infrahub/permissions/__init__.py +2 -1
  116. infrahub/permissions/constants.py +13 -0
  117. infrahub/permissions/globals.py +31 -2
  118. infrahub/permissions/manager.py +8 -5
  119. infrahub/pools/prefix.py +7 -5
  120. infrahub/prefect_server/app.py +31 -0
  121. infrahub/prefect_server/bootstrap.py +18 -0
  122. infrahub/proposed_change/action_checker.py +206 -0
  123. infrahub/proposed_change/approval_revoker.py +40 -0
  124. infrahub/proposed_change/branch_diff.py +3 -1
  125. infrahub/proposed_change/checker.py +45 -0
  126. infrahub/proposed_change/constants.py +32 -2
  127. infrahub/proposed_change/tasks.py +182 -150
  128. infrahub/py.typed +0 -0
  129. infrahub/server.py +29 -17
  130. infrahub/services/__init__.py +13 -28
  131. infrahub/services/adapters/cache/__init__.py +4 -0
  132. infrahub/services/adapters/cache/nats.py +2 -0
  133. infrahub/services/adapters/cache/redis.py +3 -0
  134. infrahub/services/adapters/message_bus/__init__.py +0 -2
  135. infrahub/services/adapters/message_bus/local.py +1 -2
  136. infrahub/services/adapters/message_bus/nats.py +6 -8
  137. infrahub/services/adapters/message_bus/rabbitmq.py +7 -9
  138. infrahub/services/adapters/workflow/__init__.py +1 -0
  139. infrahub/services/adapters/workflow/local.py +1 -8
  140. infrahub/services/component.py +2 -1
  141. infrahub/task_manager/event.py +56 -0
  142. infrahub/task_manager/models.py +9 -0
  143. infrahub/tasks/artifact.py +6 -7
  144. infrahub/tasks/check.py +4 -7
  145. infrahub/telemetry/tasks.py +15 -18
  146. infrahub/transformations/tasks.py +10 -6
  147. infrahub/trigger/tasks.py +4 -3
  148. infrahub/types.py +4 -0
  149. infrahub/validators/events.py +7 -7
  150. infrahub/validators/tasks.py +6 -7
  151. infrahub/webhook/models.py +45 -45
  152. infrahub/webhook/tasks.py +25 -24
  153. infrahub/workers/dependencies.py +143 -0
  154. infrahub/workers/infrahub_async.py +19 -43
  155. infrahub/workflows/catalogue.py +16 -2
  156. infrahub/workflows/initialization.py +5 -4
  157. infrahub/workflows/models.py +2 -0
  158. infrahub_sdk/client.py +2 -2
  159. infrahub_sdk/ctl/repository.py +51 -0
  160. infrahub_sdk/ctl/schema.py +9 -9
  161. infrahub_sdk/node/node.py +2 -2
  162. infrahub_sdk/pytest_plugin/items/graphql_query.py +1 -1
  163. infrahub_sdk/schema/repository.py +1 -1
  164. infrahub_sdk/testing/docker.py +1 -1
  165. infrahub_sdk/utils.py +2 -2
  166. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/METADATA +7 -5
  167. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/RECORD +174 -158
  168. infrahub_testcontainers/container.py +17 -0
  169. infrahub_testcontainers/docker-compose-cluster.test.yml +56 -1
  170. infrahub_testcontainers/docker-compose.test.yml +56 -1
  171. infrahub_testcontainers/helpers.py +4 -1
  172. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/LICENSE.txt +0 -0
  173. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/WHEEL +0 -0
  174. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/entry_points.txt +0 -0
infrahub/workers/dependencies.py ADDED
@@ -0,0 +1,143 @@
+ from typing import Any
+
+ from fast_depends import Depends, inject
+ from infrahub_sdk.client import InfrahubClient
+ from infrahub_sdk.config import Config
+
+ from infrahub import config
+ from infrahub.components import ComponentType
+ from infrahub.constants.environment import INSTALLATION_TYPE
+ from infrahub.database import InfrahubDatabase, get_db
+ from infrahub.services.adapters.cache import InfrahubCache
+ from infrahub.services.adapters.event import InfrahubEventService
+ from infrahub.services.adapters.http import InfrahubHTTP
+ from infrahub.services.adapters.http.httpx import HttpxAdapter
+ from infrahub.services.adapters.message_bus import InfrahubMessageBus
+ from infrahub.services.adapters.workflow import InfrahubWorkflow
+ from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
+ from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
+ from infrahub.services.component import InfrahubComponent
+
+ _singletons: dict[str, Any] = {}
+
+
+ def set_component_type(component_type: ComponentType) -> None:
+     if "component_type" not in _singletons:
+         _singletons["component_type"] = component_type
+
+
+ def get_component_type() -> ComponentType:
+     try:
+         return _singletons["component_type"]
+     except KeyError as exc:
+         raise ValueError("Component type is not set. It needs to be initialized before working with services.") from exc
+
+
+ def build_client() -> InfrahubClient:
+     return InfrahubClient(config=Config(address=config.SETTINGS.main.internal_address, retry_on_failure=True))
+
+
+ @inject
+ def get_client(client: InfrahubClient = Depends(build_client)) -> InfrahubClient: # noqa: B008
+     return client
+
+
+ def build_installation_type() -> str:
+     return INSTALLATION_TYPE
+
+
+ @inject
+ def get_installation_type(installation_type: str = Depends(build_installation_type)) -> str:
+     return installation_type
+
+
+ async def build_database() -> InfrahubDatabase:
+     if "database" not in _singletons:
+         _singletons["database"] = InfrahubDatabase(driver=await get_db(retry=5))
+     return _singletons["database"]
+
+
+ @inject
+ async def get_database(database: InfrahubDatabase = Depends(build_database)) -> InfrahubDatabase: # noqa: B008
+     return database
+
+
+ async def build_cache() -> InfrahubCache:
+     if "cache" not in _singletons:
+         _singletons["cache"] = config.OVERRIDE.cache or await InfrahubCache.new_from_driver(
+             driver=config.SETTINGS.cache.driver
+         )
+     return _singletons["cache"]
+
+
+ @inject
+ async def get_cache(cache: InfrahubCache = Depends(build_cache)) -> InfrahubCache: # noqa: B008
+     return cache
+
+
+ async def build_message_bus() -> InfrahubMessageBus:
+     if "message_bus" not in _singletons:
+         _singletons["message_bus"] = config.OVERRIDE.message_bus or (
+             await InfrahubMessageBus.new_from_driver(
+                 component_type=get_component_type(), driver=config.SETTINGS.broker.driver
+             )
+         )
+     return _singletons["message_bus"]
+
+
+ @inject
+ async def get_message_bus(message_bus: InfrahubMessageBus = Depends(build_message_bus)) -> InfrahubMessageBus: # noqa: B008
+     return message_bus
+
+
+ async def build_event_service() -> InfrahubEventService:
+     if "event_service" not in _singletons:
+         _singletons["event_service"] = InfrahubEventService(message_bus=await get_message_bus())
+     return _singletons["event_service"]
+
+
+ @inject
+ async def get_event_service(event_service: InfrahubEventService = Depends(build_event_service)) -> InfrahubEventService: # noqa: B008
+     return event_service
+
+
+ def build_workflow() -> InfrahubWorkflow:
+     if "workflow" not in _singletons:
+         _singletons["workflow"] = config.OVERRIDE.workflow or (
+             WorkflowWorkerExecution()
+             if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER
+             else WorkflowLocalExecution()
+         )
+     return _singletons["workflow"]
+
+
+ @inject
+ def get_workflow(workflow: InfrahubWorkflow = Depends(build_workflow)) -> InfrahubWorkflow: # noqa: B008
+     return workflow
+
+
+ def build_http_service() -> InfrahubHTTP:
+     if "http_service" not in _singletons:
+         _singletons["http_service"] = HttpxAdapter()
+     return _singletons["http_service"]
+
+
+ @inject
+ def get_http(http_service: InfrahubHTTP = Depends(build_http_service)) -> InfrahubHTTP: # noqa: B008
+     return http_service
+
+
+ async def build_component() -> InfrahubComponent:
+     if "component" not in _singletons:
+         _singletons["component"] = await InfrahubComponent.new(
+             cache=await get_cache(),
+             component_type=get_component_type(),
+             db=await get_database(),
+             message_bus=await get_message_bus(),
+         )
+     return _singletons["component"]
+
+
+ @inject
+ async def get_component(component: InfrahubComponent = Depends(build_component)) -> InfrahubComponent: # noqa: B008
+     return component
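The new infrahub/workers/dependencies.py module centralizes service construction behind fast_depends providers backed by a module-level singleton cache: each build_* function creates its object once, each get_* accessor resolves it through Depends, and config.OVERRIDE values still take precedence. Below is a minimal consumption sketch, not part of the package, assuming a fully configured Infrahub environment (settings loaded; database, cache and broker reachable):

# Illustrative sketch only: how the new getters are intended to be consumed.
# Assumes Infrahub settings are loaded and the backing services are reachable.
import asyncio

from infrahub.components import ComponentType
from infrahub.workers.dependencies import (
    get_cache,
    get_database,
    get_message_bus,
    set_component_type,
)


async def main() -> None:
    # The component type must be registered before any dependency that needs it
    # (for example the message bus) is resolved.
    set_component_type(component_type=ComponentType.GIT_AGENT)

    database = await get_database()        # built once, then reused from _singletons
    cache = await get_cache()              # honors config.OVERRIDE.cache when set
    message_bus = await get_message_bus()  # driver taken from config.SETTINGS.broker

    # Later calls return the same cached instances.
    assert database is await get_database()


asyncio.run(main())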
infrahub/workers/infrahub_async.py CHANGED
@@ -18,17 +18,19 @@ from infrahub import config
  from infrahub.components import ComponentType
  from infrahub.core import registry
  from infrahub.core.initialization import initialization
- from infrahub.database import InfrahubDatabase, get_db
  from infrahub.dependencies.registry import build_component_registry
  from infrahub.git import initialize_repositories_directory
  from infrahub.lock import initialize_lock
  from infrahub.services import InfrahubServices
- from infrahub.services.adapters.cache import InfrahubCache
- from infrahub.services.adapters.message_bus import InfrahubMessageBus
- from infrahub.services.adapters.workflow import InfrahubWorkflow
- from infrahub.services.adapters.workflow.local import WorkflowLocalExecution
- from infrahub.services.adapters.workflow.worker import WorkflowWorkerExecution
  from infrahub.trace import configure_trace
+ from infrahub.workers.dependencies import (
+     get_cache,
+     get_component,
+     get_database,
+     get_message_bus,
+     get_workflow,
+     set_component_type,
+ )
  from infrahub.workers.utils import inject_service_parameter, load_flow_function
  from infrahub.workflows.models import TASK_RESULT_STORAGE_NAME

@@ -63,6 +65,7 @@ class InfrahubWorkerAsync(BaseWorker):
      _logo_url = "https://example.com/logo"
      _description = "Infrahub worker designed to run the flow in the main async loop."
      service: InfrahubServices # keep a reference to `service` so we can inject it within flows parameters.
+     component_type = ComponentType.GIT_AGENT

      async def setup(
          self,
@@ -115,6 +118,7 @@ class InfrahubWorkerAsync(BaseWorker):
              )
          )

+         set_component_type(component_type=self.component_type)
          await self._init_services(client=client)

          if not registry.schema_has_been_initialized():
@@ -141,22 +145,18 @@ class InfrahubWorkerAsync(BaseWorker):
          entrypoint: str = configuration._related_objects["deployment"].entrypoint

          file_path, flow_name = entrypoint.split(":")
-         file_path.replace("/", ".")
-         module_path = file_path.replace("backend/", "").replace(".py", "").replace("/", ".")
+         module_path = file_path.removeprefix("backend/").removesuffix(".py").replace("/", ".")
          flow_func = load_flow_function(module_path=module_path, flow_name=flow_name)
          inject_service_parameter(func=flow_func, parameters=flow_run.parameters, service=self.service)
          flow_run_logger.debug("Validating parameters")
          params = flow_func.validate_parameters(parameters=flow_run.parameters)

          if task_status:
-             task_status.started()
+             task_status.started(True)

          await run_flow_async(flow=flow_func, flow_run=flow_run, parameters=params, return_type="state")

-         return InfrahubWorkerAsyncResult(
-             status_code=0,
-             identifier=str(flow_run.id),
-         )
+         return InfrahubWorkerAsyncResult(status_code=0, identifier=str(flow_run.id))

      def _init_logger(self) -> None:
          """Initialize loggers to use the API handle provided by Prefect."""
@@ -182,41 +182,17 @@ class InfrahubWorkerAsync(BaseWorker):

          return client

-     async def _init_database(self) -> InfrahubDatabase:
-         return InfrahubDatabase(driver=await get_db(retry=1))
-
-     async def _init_workflow(self) -> InfrahubWorkflow:
-         return config.OVERRIDE.workflow or (
-             WorkflowWorkerExecution()
-             if config.SETTINGS.workflow.driver == config.WorkflowDriver.WORKER
-             else WorkflowLocalExecution()
-         )
-
-     async def _init_message_bus(self, component_type: ComponentType) -> InfrahubMessageBus:
-         return config.OVERRIDE.message_bus or (
-             await InfrahubMessageBus.new_from_driver(
-                 component_type=component_type, driver=config.SETTINGS.broker.driver
-             )
-         )
-
-     async def _init_cache(self) -> InfrahubCache:
-         return config.OVERRIDE.cache or (await InfrahubCache.new_from_driver(driver=config.SETTINGS.cache.driver))
-
      async def _init_services(self, client: InfrahubClient) -> None:
-         component_type = ComponentType.GIT_AGENT
          client = await self._init_infrahub_client(client=client)
-         database = await self._init_database()
-         workflow = await self._init_workflow()
-         message_bus = await self._init_message_bus(component_type=component_type)
-         cache = await self._init_cache()

          service = await InfrahubServices.new(
-             cache=cache,
+             cache=await get_cache(),
              client=client,
-             database=database,
-             message_bus=message_bus,
-             workflow=workflow,
-             component_type=component_type,
+             database=await get_database(),
+             message_bus=await get_message_bus(),
+             workflow=get_workflow(),
+             component=await get_component(),
+             component_type=self.component_type,
          )

          self.service = service
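Within the worker, the entrypoint-to-module conversion drops a dead file_path.replace("/", ".") call (its result was discarded) and switches from chained replace() calls to removeprefix()/removesuffix(), which only touch the ends of the string. A small, contrived illustration of the difference:

# Contrived paths, for illustration only.
path = "backend/infrahub/core/branch/tasks.py"
old = path.replace("backend/", "").replace(".py", "").replace("/", ".")
new = path.removeprefix("backend/").removesuffix(".py").replace("/", ".")
assert old == new == "infrahub.core.branch.tasks"  # identical on well-behaved paths

tricky = "backend/tests/backend/my.python.py"
old = tricky.replace("backend/", "").replace(".py", "").replace("/", ".")     # 'tests.mython'
new = tricky.removeprefix("backend/").removesuffix(".py").replace("/", ".")   # 'tests.backend.my.python'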
infrahub/workflows/catalogue.py CHANGED
@@ -1,5 +1,7 @@
  import random

+ from fast_depends import Depends, inject
+
  from .constants import WorkflowTag, WorkflowType
  from .models import WorkerPoolDefinition, WorkflowDefinition

@@ -529,9 +531,9 @@ VALIDATE_SCHEMA_NUMBER_POOLS = WorkflowDefinition(
  )


- worker_pools = [INFRAHUB_WORKER_POOL]
+ WORKER_POOLS = [INFRAHUB_WORKER_POOL]

- workflows = [
+ WORKFLOWS = [
      ACTION_ADD_NODE_TO_GROUP,
      ACTION_RUN_GENERATOR,
      ACTION_RUN_GENERATOR_GROUP_EVENT,
@@ -603,3 +605,15 @@ workflows = [
      WEBHOOK_DELETE_AUTOMATION,
      WEBHOOK_PROCESS,
  ]
+
+
+ # Use this dependency injection mechanism to easily add new workflows within infrahub-enterprise
+ def build_workflows_definitions() -> list[WorkflowDefinition]:
+     return WORKFLOWS
+
+
+ @inject
+ def get_workflows(
+     workflows: list[WorkflowDefinition] = Depends(build_workflows_definitions), # noqa: B008
+ ) -> list[WorkflowDefinition]:
+     return workflows
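The catalogue now exposes the worker pools and workflows as the constants WORKER_POOLS and WORKFLOWS, and wraps the workflow list in a fast_depends accessor so that, per the in-code comment, infrahub-enterprise can plug in additional workflows. A hypothetical sketch of how a downstream package could reuse the same pattern (ENTERPRISE_WORKFLOWS and this whole module are assumptions, not part of this release):

# Hypothetical downstream module reusing the same fast_depends pattern.
# ENTERPRISE_WORKFLOWS is an assumed name and would be populated elsewhere.
from fast_depends import Depends, inject

from infrahub.workflows.catalogue import WORKFLOWS
from infrahub.workflows.models import WorkflowDefinition

ENTERPRISE_WORKFLOWS: list[WorkflowDefinition] = []


def build_workflows_definitions() -> list[WorkflowDefinition]:
    # Extend the community catalogue instead of replacing it.
    return WORKFLOWS + ENTERPRISE_WORKFLOWS


@inject
def get_workflows(
    workflows: list[WorkflowDefinition] = Depends(build_workflows_definitions),  # noqa: B008
) -> list[WorkflowDefinition]:
    return workflows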
infrahub/workflows/initialization.py CHANGED
@@ -11,19 +11,20 @@ from infrahub.trigger.catalogue import builtin_triggers
  from infrahub.trigger.models import TriggerType
  from infrahub.trigger.setup import setup_triggers

- from .catalogue import worker_pools, workflows
+ from .catalogue import WORKER_POOLS, get_workflows
  from .models import TASK_RESULT_STORAGE_NAME


  @task(name="task-manager-setup-worker-pools", task_run_name="Setup Worker pools", cache_policy=NONE) # type: ignore[arg-type]
  async def setup_worker_pools(client: PrefectClient) -> None:
      log = get_run_logger()
-     for worker in worker_pools:
+     for worker in WORKER_POOLS:
          wp = WorkPoolCreate(
              name=worker.name,
              type=worker.worker_type or config.SETTINGS.workflow.default_worker_type,
              description=worker.description,
          )
+
          try:
              await client.create_work_pool(work_pool=wp, overwrite=True)
              log.info(f"Work pool {worker.name} created successfully ... ")
@@ -34,10 +35,10 @@ async def setup_worker_pools(client: PrefectClient) -> None:
  @task(name="task-manager-setup-deployments", task_run_name="Setup Deployments", cache_policy=NONE) # type: ignore[arg-type]
  async def setup_deployments(client: PrefectClient) -> None:
      log = get_run_logger()
-     for workflow in workflows:
+     for workflow in get_workflows():
          # For now the workpool is hardcoded but
          # later we need to make it dynamic to have a different worker based on the type of the workflow
-         work_pool = worker_pools[0]
+         work_pool = WORKER_POOLS[0]
          await workflow.save(client=client, work_pool=work_pool)
          log.info(f"Flow {workflow.name}, created successfully ... ")

infrahub/workflows/models.py CHANGED
@@ -51,6 +51,8 @@ class WorkflowDefinition(BaseModel):

      @property
      def entrypoint(self) -> str:
+         if self.type == WorkflowType.USER:
+             return f"{self.module}:{self.function}"
          return f"backend/{self.module.replace('.', '/')}:{self.function}"

      @property
infrahub_sdk/client.py CHANGED
@@ -790,7 +790,7 @@ class InfrahubClient(BaseClient):
          async def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelationsNode]:
              """Process a single page of results."""
              query_data = await InfrahubNode(client=self, schema=schema, branch=branch).generate_query_data(
-                 offset=offset or page_offset,
+                 offset=page_offset if offset is None else offset,
                  limit=limit or pagination_size,
                  filters=filters,
                  include=include,
@@ -1954,7 +1954,7 @@ class InfrahubClientSync(BaseClient):
          def process_page(page_offset: int, page_number: int) -> tuple[dict, ProcessRelationsNodeSync]:
              """Process a single page of results."""
              query_data = InfrahubNodeSync(client=self, schema=schema, branch=branch).generate_query_data(
-                 offset=offset or page_offset,
+                 offset=page_offset if offset is None else offset,
                  limit=limit or pagination_size,
                  filters=filters,
                  include=include,
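Both process_page implementations previously used "offset or page_offset", so an explicit offset of 0 (a valid first-page request) was treated as falsy and replaced by the computed page offset; the new conditional only falls back when offset is None. A quick illustration:

# Why `offset or page_offset` mishandles an explicit offset of 0.
offset, page_offset = 0, 50

old_value = offset or page_offset                      # 50: the caller's 0 is lost
new_value = page_offset if offset is None else offset  # 0: only None triggers the fallback

assert old_value == 50
assert new_value == 0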
infrahub_sdk/ctl/repository.py CHANGED
@@ -1,10 +1,12 @@
  from __future__ import annotations

+ import asyncio
  from pathlib import Path
  from typing import Optional

  import typer
  import yaml
+ from copier import run_copy
  from pydantic import ValidationError
  from rich.console import Console
  from rich.table import Table
@@ -165,3 +167,52 @@ async def list(
          )

      console.print(table)
+
+
+ @app.command()
+ async def init(
+     directory: Path = typer.Argument(help="Directory path for the new project."),
+     template: str = typer.Option(
+         default="https://github.com/opsmill/infrahub-template.git",
+         help="Template to use for the new repository. Can be a local path or a git repository URL.",
+     ),
+     data: Optional[Path] = typer.Option(default=None, help="Path to YAML file containing answers to CLI prompt."),
+     vcs_ref: Optional[str] = typer.Option(
+         default="HEAD",
+         help="VCS reference to use for the template. Defaults to HEAD.",
+     ),
+     trust: Optional[bool] = typer.Option(
+         default=False,
+         help="Trust the template repository. If set, the template will be cloned without verification.",
+     ),
+     _: str = CONFIG_PARAM,
+ ) -> None:
+     """Initialize a new Infrahub repository."""
+
+     config_data = None
+     if data:
+         try:
+             with Path.open(data, encoding="utf-8") as file:
+                 config_data = yaml.safe_load(file)
+                 typer.echo(f"Loaded config: {config_data}")
+         except Exception as exc:
+             typer.echo(f"Error loading YAML file: {exc}", err=True)
+             raise typer.Exit(code=1)
+
+     # Allow template to be a local path or a URL
+     template_source = template or ""
+     if template and Path(template).exists():
+         template_source = str(Path(template).resolve())
+
+     try:
+         await asyncio.to_thread(
+             run_copy,
+             template_source,
+             str(directory),
+             data=config_data,
+             vcs_ref=vcs_ref,
+             unsafe=trust,
+         )
+     except Exception as e:
+         typer.echo(f"Error running copier: {e}", err=True)
+         raise typer.Exit(code=1)
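The new init command scaffolds a repository from a copier template, loading optional pre-seeded answers from YAML and running the blocking run_copy call in a worker thread so the async Typer command stays responsive. Roughly equivalent programmatic usage, for illustration only (the destination path is a placeholder, and valid answer keys depend on the template's copier.yml):

# Illustrative equivalent of the new `init` command, without the Typer wrapper.
from copier import run_copy

run_copy(
    "https://github.com/opsmill/infrahub-template.git",  # default template used by the command
    "./my-infrahub-repo",                                # placeholder destination directory
    data=None,          # or a dict of answers loaded from the --data YAML file
    vcs_ref="HEAD",     # template revision, mirroring --vcs-ref
    unsafe=False,       # set to True only for trusted templates (--trust)
)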
infrahub_sdk/ctl/schema.py CHANGED
@@ -36,7 +36,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[Schem
      has_error: bool = False
      for schema_file in schemas:
          try:
-             client.schema.validate(data=schema_file.content)
+             client.schema.validate(data=schema_file.payload)
          except ValidationError as exc:
              console.print(f"[red]Schema not valid, found '{len(exc.errors())}' error(s) in {schema_file.location}")
              has_error = True
@@ -48,7 +48,7 @@ def validate_schema_content_and_exit(client: InfrahubClient, schemas: list[Schem
          raise typer.Exit(1)


- def display_schema_load_errors(response: dict[str, Any], schemas_data: list[dict]) -> None:
+ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[SchemaFile]) -> None:
      console.print("[red]Unable to load the schema:")
      if "detail" not in response:
          handle_non_detail_errors(response=response)
@@ -87,7 +87,7 @@ def handle_non_detail_errors(response: dict[str, Any]) -> None:
      if "error" in response:
          console.print(f" {response.get('error')}")
      elif "errors" in response:
-         for error in response.get("errors"):
+         for error in response["errors"]:
              console.print(f" {error.get('message')}")
      else:
          console.print(f" '{response}'")
@@ -97,9 +97,9 @@ def valid_error_path(loc_path: list[Any]) -> bool:
      return len(loc_path) >= 6 and loc_path[0] == "body" and loc_path[1] == "schemas"


- def get_node(schemas_data: list[dict], schema_index: int, node_index: int) -> dict | None:
-     if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].content["nodes"]):
-         return schemas_data[schema_index].content["nodes"][node_index]
+ def get_node(schemas_data: list[SchemaFile], schema_index: int, node_index: int) -> dict | None:
+     if schema_index < len(schemas_data) and node_index < len(schemas_data[schema_index].payload["nodes"]):
+         return schemas_data[schema_index].payload["nodes"][node_index]
      return None


@@ -122,7 +122,7 @@ async def load(
      validate_schema_content_and_exit(client=client, schemas=schemas_data)

      start_time = time.time()
-     response = await client.schema.load(schemas=[item.content for item in schemas_data], branch=branch)
+     response = await client.schema.load(schemas=[item.payload for item in schemas_data], branch=branch)
      loading_time = time.time() - start_time

      if response.errors:
@@ -170,10 +170,10 @@ async def check(
      client = initialize_client()
      validate_schema_content_and_exit(client=client, schemas=schemas_data)

-     success, response = await client.schema.check(schemas=[item.content for item in schemas_data], branch=branch)
+     success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)

      if not success:
-         display_schema_load_errors(response=response, schemas_data=schemas_data)
+         display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
      else:
          for schema_file in schemas_data:
              console.print(f"[green] schema '{schema_file.location}' is Valid!")
infrahub_sdk/node/node.py CHANGED
@@ -1,7 +1,7 @@
  from __future__ import annotations

  from collections.abc import Iterable
- from copy import copy
+ from copy import copy, deepcopy
  from typing import TYPE_CHECKING, Any

  from ..constants import InfrahubClientMode
@@ -397,7 +397,7 @@ class InfrahubNodeBase:
              "edges": {"node": {"id": None, "hfid": None, "display_label": None, "__typename": None}},
          }

-         data["@filters"] = filters or {}
+         data["@filters"] = deepcopy(filters) if filters is not None else {}

          if order:
              data["@filters"]["order"] = order
infrahub_sdk/pytest_plugin/items/graphql_query.py CHANGED
@@ -16,7 +16,7 @@ if TYPE_CHECKING:


  class InfrahubGraphQLQueryItem(InfrahubItem):
      def validate_resource_config(self) -> None:
-         # Resource name does not need to match against infrahub repo config
+         # Resource name does not need to match against Infrahub repository configuration
          return

      def execute_query(self) -> Any:
infrahub_sdk/schema/repository.py CHANGED
@@ -24,7 +24,7 @@ ResourceClass = TypeVar("ResourceClass")


  class InfrahubRepositoryConfigElement(BaseModel):
-     """Class to regroup all elements of the infrahub configuration for a repository for typing purpose."""
+     """Class to regroup all elements of the Infrahub configuration for a repository for typing purpose."""


  class InfrahubRepositoryArtifactDefinitionConfig(InfrahubRepositoryConfigElement):
infrahub_sdk/testing/docker.py CHANGED
@@ -13,7 +13,7 @@ INFRAHUB_VERSION = os.getenv("INFRAHUB_TESTING_IMAGE_VER")

  def skip_version(min_infrahub_version: str | None = None, max_infrahub_version: str | None = None) -> bool:
      """
-     Check if a test should be skipped depending on infrahub version.
+     Check if a test should be skipped depending on Infrahub version.
      """
      if INFRAHUB_VERSION is None:
          return True
infrahub_sdk/utils.py CHANGED
@@ -95,7 +95,7 @@ def decode_json(response: httpx.Response) -> dict:
      try:
          return response.json()
      except json.decoder.JSONDecodeError as exc:
-         raise JsonDecodeError(content=response.text, url=response.url) from exc
+         raise JsonDecodeError(content=response.text, url=str(response.url)) from exc


  def generate_uuid() -> str:
@@ -232,7 +232,7 @@ def get_branch(branch: str | None = None, directory: str | Path = ".") -> str:
      if branch:
          return branch

-     repo = GitRepoManager(directory)
+     repo = GitRepoManager(root_directory=str(directory))
      return str(repo.active_branch)

{infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: infrahub-server
- Version: 1.3.7
+ Version: 1.4.0
  Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
  License: Apache-2.0
  Author: OpsMill
@@ -19,9 +19,11 @@ Requires-Dist: asgi-correlation-id (==4.2.0)
  Requires-Dist: authlib (==1.3.2)
  Requires-Dist: bcrypt (>=4.1,<4.2)
  Requires-Dist: boto3 (==1.34.129)
+ Requires-Dist: copier (>=9.8.0,<10.0.0)
  Requires-Dist: dulwich (>=0.22.7,<0.23.0)
  Requires-Dist: email-validator (>=2.1,<2.2)
- Requires-Dist: fastapi (>=0.115,<0.116)
+ Requires-Dist: fast-depends (>=2.4.12,<3.0.0)
+ Requires-Dist: fastapi (==0.116.1)
  Requires-Dist: fastapi-storages (>=0.3,<0.4)
  Requires-Dist: gitpython (>=3,<4)
  Requires-Dist: graphene (>=3.4,<3.5)
@@ -38,8 +40,8 @@ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (==1.28.1)
  Requires-Dist: opentelemetry-exporter-otlp-proto-http (==1.28.1)
  Requires-Dist: opentelemetry-instrumentation-aio-pika (==0.49b1)
  Requires-Dist: opentelemetry-instrumentation-fastapi (==0.49b1)
- Requires-Dist: prefect (==3.4.1)
- Requires-Dist: prefect-redis (==0.2.2)
+ Requires-Dist: prefect (==3.4.13)
+ Requires-Dist: prefect-redis (==0.2.4)
  Requires-Dist: pyarrow (>=14,<15)
  Requires-Dist: pydantic (>=2.10,<2.11)
  Requires-Dist: pydantic-settings (>=2.8,<2.9)
@@ -47,7 +49,7 @@ Requires-Dist: pyjwt (>=2.8,<2.9)
  Requires-Dist: pytest (>=7.4,<7.5)
  Requires-Dist: python-multipart (==0.0.18)
  Requires-Dist: pyyaml (>=6,<7)
- Requires-Dist: redis[hiredis] (>=5.0.0,<6.0.0)
+ Requires-Dist: redis[hiredis] (>=6.0.0,<7.0.0)
  Requires-Dist: rich (>=13,<14)
  Requires-Dist: starlette-exporter (>=0.23,<0.24)
  Requires-Dist: structlog (==24.1.0)