infrahub-server 1.4.12__py3-none-any.whl → 1.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (234)
  1. infrahub/actions/tasks.py +208 -16
  2. infrahub/api/artifact.py +3 -0
  3. infrahub/api/diff/diff.py +1 -1
  4. infrahub/api/internal.py +2 -0
  5. infrahub/api/query.py +2 -0
  6. infrahub/api/schema.py +27 -3
  7. infrahub/auth.py +5 -5
  8. infrahub/cli/__init__.py +2 -0
  9. infrahub/cli/db.py +160 -157
  10. infrahub/cli/dev.py +118 -0
  11. infrahub/cli/tasks.py +46 -0
  12. infrahub/cli/upgrade.py +56 -9
  13. infrahub/computed_attribute/tasks.py +19 -7
  14. infrahub/config.py +7 -2
  15. infrahub/core/attribute.py +35 -24
  16. infrahub/core/branch/enums.py +1 -1
  17. infrahub/core/branch/models.py +9 -5
  18. infrahub/core/branch/needs_rebase_status.py +11 -0
  19. infrahub/core/branch/tasks.py +72 -10
  20. infrahub/core/changelog/models.py +2 -10
  21. infrahub/core/constants/__init__.py +4 -0
  22. infrahub/core/constants/infrahubkind.py +1 -0
  23. infrahub/core/convert_object_type/object_conversion.py +201 -0
  24. infrahub/core/convert_object_type/repository_conversion.py +89 -0
  25. infrahub/core/convert_object_type/schema_mapping.py +27 -3
  26. infrahub/core/diff/calculator.py +2 -2
  27. infrahub/core/diff/model/path.py +4 -0
  28. infrahub/core/diff/payload_builder.py +1 -1
  29. infrahub/core/diff/query/artifact.py +1 -0
  30. infrahub/core/diff/query/delete_query.py +9 -5
  31. infrahub/core/diff/query/field_summary.py +1 -0
  32. infrahub/core/diff/query/merge.py +39 -23
  33. infrahub/core/graph/__init__.py +1 -1
  34. infrahub/core/initialization.py +7 -4
  35. infrahub/core/manager.py +3 -81
  36. infrahub/core/migrations/__init__.py +3 -0
  37. infrahub/core/migrations/exceptions.py +4 -0
  38. infrahub/core/migrations/graph/__init__.py +13 -10
  39. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  40. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  41. infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
  42. infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
  43. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
  44. infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
  45. infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
  46. infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
  47. infrahub/core/migrations/query/__init__.py +7 -8
  48. infrahub/core/migrations/query/attribute_add.py +8 -6
  49. infrahub/core/migrations/query/attribute_remove.py +134 -0
  50. infrahub/core/migrations/runner.py +54 -0
  51. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  52. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  53. infrahub/core/migrations/schema/node_attribute_add.py +26 -5
  54. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  55. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  56. infrahub/core/migrations/schema/node_remove.py +2 -1
  57. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  58. infrahub/core/migrations/shared.py +66 -19
  59. infrahub/core/models.py +2 -2
  60. infrahub/core/node/__init__.py +207 -54
  61. infrahub/core/node/create.py +53 -49
  62. infrahub/core/node/lock_utils.py +124 -0
  63. infrahub/core/node/node_property_attribute.py +230 -0
  64. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  65. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  66. infrahub/core/node/resource_manager/number_pool.py +2 -1
  67. infrahub/core/node/standard.py +1 -1
  68. infrahub/core/property.py +11 -0
  69. infrahub/core/protocols.py +8 -1
  70. infrahub/core/query/attribute.py +82 -15
  71. infrahub/core/query/diff.py +61 -16
  72. infrahub/core/query/ipam.py +16 -4
  73. infrahub/core/query/node.py +92 -212
  74. infrahub/core/query/relationship.py +44 -26
  75. infrahub/core/query/subquery.py +0 -8
  76. infrahub/core/relationship/model.py +69 -24
  77. infrahub/core/schema/__init__.py +56 -0
  78. infrahub/core/schema/attribute_schema.py +4 -2
  79. infrahub/core/schema/basenode_schema.py +42 -2
  80. infrahub/core/schema/definitions/core/__init__.py +2 -0
  81. infrahub/core/schema/definitions/core/check.py +1 -1
  82. infrahub/core/schema/definitions/core/generator.py +2 -0
  83. infrahub/core/schema/definitions/core/group.py +16 -2
  84. infrahub/core/schema/definitions/core/repository.py +7 -0
  85. infrahub/core/schema/definitions/core/transform.py +1 -1
  86. infrahub/core/schema/definitions/internal.py +12 -3
  87. infrahub/core/schema/generated/attribute_schema.py +2 -2
  88. infrahub/core/schema/generated/base_node_schema.py +6 -1
  89. infrahub/core/schema/manager.py +3 -0
  90. infrahub/core/schema/node_schema.py +1 -0
  91. infrahub/core/schema/relationship_schema.py +0 -1
  92. infrahub/core/schema/schema_branch.py +295 -10
  93. infrahub/core/schema/schema_branch_display.py +135 -0
  94. infrahub/core/schema/schema_branch_hfid.py +120 -0
  95. infrahub/core/validators/aggregated_checker.py +1 -1
  96. infrahub/database/graph.py +21 -0
  97. infrahub/display_labels/__init__.py +0 -0
  98. infrahub/display_labels/gather.py +48 -0
  99. infrahub/display_labels/models.py +240 -0
  100. infrahub/display_labels/tasks.py +192 -0
  101. infrahub/display_labels/triggers.py +22 -0
  102. infrahub/events/branch_action.py +27 -1
  103. infrahub/events/group_action.py +1 -1
  104. infrahub/events/node_action.py +1 -1
  105. infrahub/generators/constants.py +7 -0
  106. infrahub/generators/models.py +38 -12
  107. infrahub/generators/tasks.py +34 -16
  108. infrahub/git/base.py +42 -2
  109. infrahub/git/integrator.py +22 -14
  110. infrahub/git/tasks.py +52 -2
  111. infrahub/graphql/analyzer.py +9 -0
  112. infrahub/graphql/api/dependencies.py +2 -4
  113. infrahub/graphql/api/endpoints.py +16 -6
  114. infrahub/graphql/app.py +2 -4
  115. infrahub/graphql/initialization.py +2 -3
  116. infrahub/graphql/manager.py +213 -137
  117. infrahub/graphql/middleware.py +12 -0
  118. infrahub/graphql/mutations/branch.py +16 -0
  119. infrahub/graphql/mutations/computed_attribute.py +110 -3
  120. infrahub/graphql/mutations/convert_object_type.py +44 -13
  121. infrahub/graphql/mutations/display_label.py +118 -0
  122. infrahub/graphql/mutations/generator.py +25 -7
  123. infrahub/graphql/mutations/hfid.py +125 -0
  124. infrahub/graphql/mutations/ipam.py +73 -41
  125. infrahub/graphql/mutations/main.py +61 -178
  126. infrahub/graphql/mutations/profile.py +195 -0
  127. infrahub/graphql/mutations/proposed_change.py +8 -1
  128. infrahub/graphql/mutations/relationship.py +2 -2
  129. infrahub/graphql/mutations/repository.py +22 -83
  130. infrahub/graphql/mutations/resource_manager.py +2 -2
  131. infrahub/graphql/mutations/webhook.py +1 -1
  132. infrahub/graphql/queries/resource_manager.py +1 -1
  133. infrahub/graphql/registry.py +173 -0
  134. infrahub/graphql/resolvers/resolver.py +2 -0
  135. infrahub/graphql/schema.py +8 -1
  136. infrahub/graphql/schema_sort.py +170 -0
  137. infrahub/graphql/types/branch.py +4 -1
  138. infrahub/graphql/types/enums.py +3 -0
  139. infrahub/groups/tasks.py +1 -1
  140. infrahub/hfid/__init__.py +0 -0
  141. infrahub/hfid/gather.py +48 -0
  142. infrahub/hfid/models.py +240 -0
  143. infrahub/hfid/tasks.py +191 -0
  144. infrahub/hfid/triggers.py +22 -0
  145. infrahub/lock.py +119 -42
  146. infrahub/locks/__init__.py +0 -0
  147. infrahub/locks/tasks.py +37 -0
  148. infrahub/message_bus/types.py +1 -0
  149. infrahub/patch/plan_writer.py +2 -2
  150. infrahub/permissions/constants.py +2 -0
  151. infrahub/profiles/__init__.py +0 -0
  152. infrahub/profiles/node_applier.py +101 -0
  153. infrahub/profiles/queries/__init__.py +0 -0
  154. infrahub/profiles/queries/get_profile_data.py +98 -0
  155. infrahub/profiles/tasks.py +63 -0
  156. infrahub/proposed_change/tasks.py +67 -14
  157. infrahub/repositories/__init__.py +0 -0
  158. infrahub/repositories/create_repository.py +113 -0
  159. infrahub/server.py +9 -1
  160. infrahub/services/__init__.py +8 -5
  161. infrahub/services/adapters/http/__init__.py +5 -0
  162. infrahub/services/adapters/workflow/worker.py +14 -3
  163. infrahub/task_manager/event.py +5 -0
  164. infrahub/task_manager/models.py +7 -0
  165. infrahub/task_manager/task.py +73 -0
  166. infrahub/tasks/registry.py +6 -4
  167. infrahub/trigger/catalogue.py +4 -0
  168. infrahub/trigger/models.py +2 -0
  169. infrahub/trigger/setup.py +13 -4
  170. infrahub/trigger/tasks.py +6 -0
  171. infrahub/webhook/models.py +1 -1
  172. infrahub/workers/dependencies.py +3 -1
  173. infrahub/workers/infrahub_async.py +10 -2
  174. infrahub/workflows/catalogue.py +118 -3
  175. infrahub/workflows/initialization.py +21 -0
  176. infrahub/workflows/models.py +17 -2
  177. infrahub/workflows/utils.py +2 -1
  178. infrahub_sdk/branch.py +17 -8
  179. infrahub_sdk/checks.py +1 -1
  180. infrahub_sdk/client.py +376 -95
  181. infrahub_sdk/config.py +29 -2
  182. infrahub_sdk/convert_object_type.py +61 -0
  183. infrahub_sdk/ctl/branch.py +3 -0
  184. infrahub_sdk/ctl/check.py +2 -3
  185. infrahub_sdk/ctl/cli_commands.py +20 -12
  186. infrahub_sdk/ctl/config.py +8 -2
  187. infrahub_sdk/ctl/generator.py +6 -3
  188. infrahub_sdk/ctl/graphql.py +184 -0
  189. infrahub_sdk/ctl/repository.py +39 -1
  190. infrahub_sdk/ctl/schema.py +40 -10
  191. infrahub_sdk/ctl/task.py +110 -0
  192. infrahub_sdk/ctl/utils.py +4 -0
  193. infrahub_sdk/ctl/validate.py +5 -3
  194. infrahub_sdk/diff.py +4 -5
  195. infrahub_sdk/exceptions.py +2 -0
  196. infrahub_sdk/generator.py +7 -1
  197. infrahub_sdk/graphql/__init__.py +12 -0
  198. infrahub_sdk/graphql/constants.py +1 -0
  199. infrahub_sdk/graphql/plugin.py +85 -0
  200. infrahub_sdk/graphql/query.py +77 -0
  201. infrahub_sdk/{graphql.py → graphql/renderers.py} +88 -75
  202. infrahub_sdk/graphql/utils.py +40 -0
  203. infrahub_sdk/node/attribute.py +2 -0
  204. infrahub_sdk/node/node.py +28 -20
  205. infrahub_sdk/node/relationship.py +1 -3
  206. infrahub_sdk/playback.py +1 -2
  207. infrahub_sdk/protocols.py +54 -6
  208. infrahub_sdk/pytest_plugin/plugin.py +7 -4
  209. infrahub_sdk/pytest_plugin/utils.py +40 -0
  210. infrahub_sdk/repository.py +1 -2
  211. infrahub_sdk/schema/__init__.py +70 -4
  212. infrahub_sdk/schema/main.py +1 -0
  213. infrahub_sdk/schema/repository.py +8 -0
  214. infrahub_sdk/spec/models.py +7 -0
  215. infrahub_sdk/spec/object.py +54 -6
  216. infrahub_sdk/spec/processors/__init__.py +0 -0
  217. infrahub_sdk/spec/processors/data_processor.py +10 -0
  218. infrahub_sdk/spec/processors/factory.py +34 -0
  219. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  220. infrahub_sdk/spec/range_expansion.py +118 -0
  221. infrahub_sdk/task/models.py +6 -4
  222. infrahub_sdk/timestamp.py +18 -6
  223. infrahub_sdk/transforms.py +1 -1
  224. {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/METADATA +9 -10
  225. {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/RECORD +233 -176
  226. infrahub_testcontainers/container.py +114 -2
  227. infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
  228. infrahub_testcontainers/docker-compose.test.yml +5 -0
  229. infrahub_testcontainers/models.py +2 -2
  230. infrahub_testcontainers/performance_test.py +4 -4
  231. infrahub/core/convert_object_type/conversion.py +0 -134
  232. {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/LICENSE.txt +0 -0
  233. {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/WHEEL +0 -0
  234. {infrahub_server-1.4.12.dist-info → infrahub_server-1.5.0.dist-info}/entry_points.txt +0 -0
infrahub_sdk/client.py CHANGED
@@ -4,7 +4,9 @@ import asyncio
 import copy
 import logging
 import time
-from collections.abc import Coroutine, MutableMapping
+import warnings
+from collections.abc import Coroutine, Mapping, MutableMapping
+from datetime import datetime
 from functools import wraps
 from time import sleep
 from typing import (
@@ -24,12 +26,14 @@ from typing_extensions import Self
 
 from .batch import InfrahubBatch, InfrahubBatchSync
 from .branch import (
+    MUTATION_QUERY_TASK,
     BranchData,
     InfrahubBranchManager,
     InfrahubBranchManagerSync,
 )
 from .config import Config
 from .constants import InfrahubClientMode
+from .convert_object_type import CONVERT_OBJECT_MUTATION, ConversionFieldInput
 from .data import RepositoryBranchInfo, RepositoryData
 from .diff import NodeDiff, diff_tree_node_to_node_diff, get_diff_summary_query
 from .exceptions import (
@@ -60,6 +64,9 @@ from .utils import decode_json, get_user_permissions, is_valid_uuid
 if TYPE_CHECKING:
     from types import TracebackType
 
+    from httpx._transports.base import AsyncBaseTransport, BaseTransport
+    from httpx._types import ProxyTypes
+
     from .context import RequestContext
 
 
@@ -72,12 +79,24 @@ class ProcessRelationsNode(TypedDict):
     related_nodes: list[InfrahubNode]
 
 
+class ProxyConfig(TypedDict):
+    proxy: ProxyTypes | None
+    mounts: Mapping[str, AsyncBaseTransport | None] | None
+
+
+class ProxyConfigSync(TypedDict):
+    proxy: ProxyTypes | None
+    mounts: Mapping[str, BaseTransport | None] | None
+
+
 class ProcessRelationsNodeSync(TypedDict):
     nodes: list[InfrahubNodeSync]
     related_nodes: list[InfrahubNodeSync]
 
 
-def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]):  # type: ignore[no-untyped-def]
+def handle_relogin(
+    func: Callable[..., Coroutine[Any, Any, httpx.Response]],
+) -> Callable[..., Coroutine[Any, Any, httpx.Response]]:
     @wraps(func)
     async def wrapper(client: InfrahubClient, *args: Any, **kwargs: Any) -> httpx.Response:
         response = await func(client, *args, **kwargs)
@@ -91,7 +110,7 @@ def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]): #
     return wrapper
 
 
-def handle_relogin_sync(func: Callable[..., httpx.Response]):  # type: ignore[no-untyped-def]
+def handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]:
    @wraps(func)
     def wrapper(client: InfrahubClientSync, *args: Any, **kwargs: Any) -> httpx.Response:
         response = func(client, *args, **kwargs)
@@ -105,6 +124,15 @@ def handle_relogin_sync(func: Callable[..., httpx.Response]): # type: ignore[no
     return wrapper
 
 
+def raise_for_error_deprecation_warning(value: bool | None) -> None:
+    if value is not None:
+        warnings.warn(
+            "Using `raise_for_error` is deprecated, use `try/except` to handle errors.",
+            DeprecationWarning,
+            stacklevel=1,
+        )
+
+
 class BaseClient:
     """Base class for InfrahubClient and InfrahubClientSync"""
 
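Note on the `raise_for_error` deprecation above: the helper only warns when a caller still passes an explicit value, and the default behaviour keeps raising on non-2xx HTTP responses. A minimal sketch of the migration the warning suggests, not part of this diff, assuming an Infrahub API reachable at http://localhost:8000:

    # Illustrative sketch only: handle errors with try/except instead of
    # passing raise_for_error.
    import asyncio

    import httpx

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.exceptions import GraphQLError


    async def fetch_version() -> str:
        client = InfrahubClient(address="http://localhost:8000")
        try:
            # raise_for_error is left at its default (None): non-2xx HTTP
            # responses still raise through resp.raise_for_status().
            response = await client.execute_graphql(query="query { InfrahubInfo { version } }")
        except httpx.HTTPStatusError as exc:
            raise RuntimeError(f"HTTP error: {exc.response.status_code}") from exc
        except GraphQLError as exc:
            raise RuntimeError(f"GraphQL errors returned: {exc}") from exc
        return response["InfrahubInfo"]["version"]


    asyncio.run(fetch_version())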
@@ -144,6 +172,7 @@ class BaseClient:
         self.group_context: InfrahubGroupContext | InfrahubGroupContextSync
         self._initialize()
         self._request_context: RequestContext | None = None
+        _ = self.config.tls_context  # Early load of the TLS context to catch errors
 
     def _initialize(self) -> None:
         """Sets the properties for each version of the client"""
@@ -209,7 +238,7 @@ class BaseClient:
             delete_unused_nodes=delete_unused_nodes,
             group_type=group_type,
             group_params=group_params,
-            branch=branch,
+            branch=branch or self.default_branch,
         )
 
     def _graphql_url(
@@ -250,7 +279,7 @@ class BaseClient:
 
         return Mutation(
             name="AllocateIPAddress",
-            mutation="IPAddressPoolGetResource",
+            mutation="InfrahubIPAddressPoolGetResource",
             query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}},
             input_data={"data": input_data},
         )
@@ -281,7 +310,7 @@ class BaseClient:
 
         return Mutation(
             name="AllocateIPPrefix",
-            mutation="IPPrefixPoolGetResource",
+            mutation="InfrahubIPPrefixPoolGetResource",
             query={"ok": None, "node": {"id": None, "kind": None, "identifier": None, "display_label": None}},
             input_data={"data": input_data},
         )
@@ -298,20 +327,17 @@ class InfrahubClient(BaseClient):
         self.object_store = ObjectStore(self)
         self.store = NodeStore(default_branch=self.default_branch)
         self.task = InfrahubTaskManager(self)
-        self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution)
         self._request_method: AsyncRequester = self.config.requester or self._default_request_method
         self.group_context = InfrahubGroupContext(self)
 
     async def get_version(self) -> str:
         """Return the Infrahub version."""
         response = await self.execute_graphql(query="query { InfrahubInfo { version }}")
-        version = response.get("InfrahubInfo", {}).get("version", "")
-        return version
+        return response.get("InfrahubInfo", {}).get("version", "")
 
     async def get_user(self) -> dict:
         """Return user information"""
-        user_info = await self.execute_graphql(query=QUERY_USER)
-        return user_info
+        return await self.execute_graphql(query=QUERY_USER)
 
     async def get_user_permissions(self) -> dict:
         """Return user permissions"""
@@ -540,6 +566,7 @@ class InfrahubClient(BaseClient):
         schema_kind: str,
         branch: str,
         prefetch_relationships: bool,
+        include: list[str] | None,
         timeout: int | None = None,
     ) -> ProcessRelationsNode:
         """Processes InfrahubNode and their Relationships from the GraphQL query response.
@@ -549,7 +576,7 @@ class InfrahubClient(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
 
         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -564,9 +591,12 @@ class InfrahubClient(BaseClient):
             node = await InfrahubNode.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
             nodes.append(node)
 
-            if prefetch_relationships:
+            if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
                 await node._process_relationships(
-                    node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout
+                    node_data=item,
+                    branch=branch,
+                    related_nodes=related_nodes,
+                    timeout=timeout,
                 )
 
         return ProcessRelationsNode(nodes=nodes, related_nodes=related_nodes)
@@ -666,7 +696,7 @@ class InfrahubClient(BaseClient):
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
             include (list[str], optional): List of attributes or relationships to include in the query.
@@ -763,7 +793,7 @@ class InfrahubClient(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -816,6 +846,7 @@ class InfrahubClient(BaseClient):
             branch=branch,
             prefetch_relationships=prefetch_relationships,
             timeout=timeout,
+            include=include,
         )
         return response, process_result
 
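The `include` parameter threaded through `_process_nodes_and_relationships` above means that relationships explicitly listed in `include` are now processed into related nodes even when `prefetch_relationships` is left off. A rough illustration; the kind `InfraDevice`, the relationship `interfaces`, and the use of the relationship manager's `peers` listing are assumptions for the example, not taken from this diff:

    # Hypothetical example: requesting a relationship via `include` now also
    # processes the related nodes returned by the query.
    from infrahub_sdk import InfrahubClient


    async def list_devices(client: InfrahubClient) -> None:
        devices = await client.all(kind="InfraDevice", include=["interfaces"])
        for device in devices:
            # Peers of the included relationship are populated from the query
            # response without setting prefetch_relationships=True.
            print(device.id, device.interfaces.peers)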
@@ -881,7 +912,7 @@ class InfrahubClient(BaseClient):
         branch_name: str | None = None,
         at: str | Timestamp | None = None,
         timeout: int | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
         tracker: str | None = None,
     ) -> dict:
         """Execute a GraphQL query (or mutation).
@@ -893,13 +924,17 @@ class InfrahubClient(BaseClient):
             branch_name (str, optional): Name of the branch on which the query will be executed. Defaults to None.
             at (str, optional): Time when the query should be executed. Defaults to None.
             timeout (int, optional): Timeout in second for the query. Defaults to None.
-            raise_for_error (bool, optional): Flag to indicate that we need to raise an exception if the response has some errors. Defaults to True.
+            raise_for_error (bool | None, optional): Deprecated. Controls only HTTP status handling.
+                - None (default) or True: HTTP errors raise via resp.raise_for_status().
+                - False: HTTP errors are not automatically raised. Defaults to None.
+
         Raises:
-            GraphQLError: _description_
+            GraphQLError: When the GraphQL response contains errors.
 
         Returns:
-            _type_: _description_
+            dict: The GraphQL data payload (response["data"]).
         """
+        raise_for_error_deprecation_warning(value=raise_for_error)
 
         branch_name = branch_name or self.default_branch
         url = self._graphql_url(branch_name=branch_name, at=at)
@@ -922,7 +957,7 @@ class InfrahubClient(BaseClient):
             try:
                 resp = await self._post(url=url, payload=payload, headers=headers, timeout=timeout)
 
-                if raise_for_error:
+                if raise_for_error in (None, True):
                     resp.raise_for_status()
 
                 retry = False
@@ -958,7 +993,11 @@ class InfrahubClient(BaseClient):
 
     @handle_relogin
     async def _post(
-        self, url: str, payload: dict, headers: dict | None = None, timeout: int | None = None
+        self,
+        url: str,
+        payload: dict,
+        headers: dict | None = None,
+        timeout: int | None = None,
     ) -> httpx.Response:
         """Execute a HTTP POST with HTTPX.
 
@@ -973,7 +1012,11 @@ class InfrahubClient(BaseClient):
         headers.update(base_headers)
 
         return await self._request(
-            url=url, method=HTTPMethod.POST, headers=headers, timeout=timeout or self.default_timeout, payload=payload
+            url=url,
+            method=HTTPMethod.POST,
+            headers=headers,
+            timeout=timeout or self.default_timeout,
+            payload=payload,
         )
 
     @handle_relogin
@@ -991,35 +1034,48 @@ class InfrahubClient(BaseClient):
         headers.update(base_headers)
 
         return await self._request(
-            url=url, method=HTTPMethod.GET, headers=headers, timeout=timeout or self.default_timeout
+            url=url,
+            method=HTTPMethod.GET,
+            headers=headers,
+            timeout=timeout or self.default_timeout,
         )
 
     async def _request(
-        self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: dict | None = None
+        self,
+        url: str,
+        method: HTTPMethod,
+        headers: dict[str, Any],
+        timeout: int,
+        payload: dict | None = None,
     ) -> httpx.Response:
         response = await self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload)
         self._record(response)
         return response
 
     async def _default_request_method(
-        self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: dict | None = None
+        self,
+        url: str,
+        method: HTTPMethod,
+        headers: dict[str, Any],
+        timeout: int,
+        payload: dict | None = None,
    ) -> httpx.Response:
         params: dict[str, Any] = {}
         if payload:
             params["json"] = payload
 
-        proxy_config: dict[str, str | dict[str, httpx.HTTPTransport]] = {}
+        proxy_config: ProxyConfig = {"proxy": None, "mounts": None}
         if self.config.proxy:
             proxy_config["proxy"] = self.config.proxy
         elif self.config.proxy_mounts.is_set:
             proxy_config["mounts"] = {
-                key: httpx.HTTPTransport(proxy=value)
+                key: httpx.AsyncHTTPTransport(proxy=value)
                 for key, value in self.config.proxy_mounts.model_dump(by_alias=True).items()
             }
 
         async with httpx.AsyncClient(
-            **proxy_config,  # type: ignore[arg-type]
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            **proxy_config,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = await client.request(
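Two behaviour changes are visible in this hunk: proxy mounts in the async client now use httpx.AsyncHTTPTransport instead of the synchronous HTTPTransport, and TLS verification is driven by a single config.tls_context rather than the old tls_ca_file/tls_insecure expression. A hedged, reference-only sketch of the equivalence, using only Config fields that appear in this diff:

    # Reference-only comparison of the old and new verify values.
    from infrahub_sdk import Config

    config = Config(address="http://localhost:8000", tls_insecure=True)

    # 1.4.x: a CA bundle path took precedence, otherwise verification was a
    # plain boolean derived from tls_insecure.
    legacy_verify = config.tls_ca_file if config.tls_ca_file else not config.tls_insecure

    # 1.5.0: httpx receives a prebuilt SSL context; it is also evaluated once
    # in BaseClient.__init__ so misconfiguration fails at construction time.
    new_verify = config.tls_context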
@@ -1044,7 +1100,10 @@ class InfrahubClient(BaseClient):
         response = await self._request(
             url=url,
             method=HTTPMethod.POST,
-            headers={"content-type": "application/json", "Authorization": f"Bearer {self.refresh_token}"},
+            headers={
+                "content-type": "application/json",
+                "Authorization": f"Bearer {self.refresh_token}",
+            },
             timeout=self.default_timeout,
         )
 
@@ -1077,7 +1136,10 @@ class InfrahubClient(BaseClient):
         response = await self._request(
             url=url,
             method=HTTPMethod.POST,
-            payload={"username": self.config.username, "password": self.config.password},
+            payload={
+                "username": self.config.username,
+                "password": self.config.password,
+            },
             headers={"content-type": "application/json"},
             timeout=self.default_timeout,
         )
@@ -1099,17 +1161,19 @@ class InfrahubClient(BaseClient):
         at: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> dict:
+        raise_for_error_deprecation_warning(value=raise_for_error)
+
         url = f"{self.address}/api/query/{name}"
         url_params = copy.deepcopy(params or {})
+        url_params["branch"] = branch_name or self.default_branch
+
         headers = copy.copy(self.headers or {})
 
         if self.insert_tracker and tracker:
             headers["X-Infrahub-Tracker"] = tracker
 
-        if branch_name:
-            url_params["branch"] = branch_name
         if at:
             url_params["at"] = at
 
@@ -1145,26 +1209,67 @@ class InfrahubClient(BaseClient):
             timeout=timeout or self.default_timeout,
         )
 
-        if raise_for_error:
+        if raise_for_error in (None, True):
             resp.raise_for_status()
 
         return decode_json(response=resp)
 
+    async def create_diff(
+        self,
+        branch: str,
+        name: str,
+        from_time: datetime,
+        to_time: datetime,
+        wait_until_completion: bool = True,
+    ) -> bool | str:
+        if from_time > to_time:
+            raise ValueError("from_time must be <= to_time")
+        input_data = {
+            "wait_until_completion": wait_until_completion,
+            "data": {
+                "name": name,
+                "branch": branch,
+                "from_time": from_time.isoformat(),
+                "to_time": to_time.isoformat(),
+            },
+        }
+
+        mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None}
+        query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query)
+        response = await self.execute_graphql(query=query.render(), tracker="mutation-diff-update")
+
+        if not wait_until_completion and "task" in response["DiffUpdate"]:
+            return response["DiffUpdate"]["task"]["id"]
+
+        return response["DiffUpdate"]["ok"]
+
     async def get_diff_summary(
         self,
         branch: str,
+        name: str | None = None,
+        from_time: datetime | None = None,
+        to_time: datetime | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> list[NodeDiff]:
         query = get_diff_summary_query()
+        input_data = {"branch_name": branch}
+        if name:
+            input_data["name"] = name
+        if from_time and to_time and from_time > to_time:
+            raise ValueError("from_time must be <= to_time")
+        if from_time:
+            input_data["from_time"] = from_time.isoformat()
+        if to_time:
+            input_data["to_time"] = to_time.isoformat()
         response = await self.execute_graphql(
             query=query,
             branch_name=branch,
             timeout=timeout,
             tracker=tracker,
             raise_for_error=raise_for_error,
-            variables={"branch_name": branch},
+            variables=input_data,
         )
 
         node_diffs: list[NodeDiff] = []
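Usage sketch for the new create_diff() and the extended get_diff_summary(); the branch and diff names below are placeholders, and both methods reject ranges where from_time is later than to_time:

    # Illustrative only: create a named, time-bounded diff and read it back.
    from datetime import datetime, timedelta, timezone

    from infrahub_sdk import InfrahubClient


    async def summarize_last_hour(client: InfrahubClient) -> None:
        to_time = datetime.now(tz=timezone.utc)
        from_time = to_time - timedelta(hours=1)

        # Returns True on completion, or a task id (str) when
        # wait_until_completion=False is passed.
        await client.create_diff(
            branch="my-feature-branch",
            name="last-hour",
            from_time=from_time,
            to_time=to_time,
        )

        for node_diff in await client.get_diff_summary(
            branch="my-feature-branch",
            name="last-hour",
            from_time=from_time,
            to_time=to_time,
        ):
            print(node_diff)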
@@ -1220,7 +1325,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> SchemaType: ...
 
     @overload
@@ -1265,7 +1370,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> CoreNode | None: ...
 
     async def allocate_next_ip_address(
@@ -1279,7 +1384,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> CoreNode | SchemaType | None:
         """Allocate a new IP address by using the provided resource pool.
 
@@ -1292,7 +1397,7 @@ class InfrahubClient(BaseClient):
             branch (str, optional): Name of the branch to allocate from. Defaults to default_branch.
             timeout (int, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             tracker (str, optional): The offset for pagination.
-            raise_for_error (bool, optional): The limit for pagination.
+            raise_for_error (bool, optional): Deprecated, raise an error if the HTTP status is not 2XX.
         Returns:
             InfrahubNode: Node corresponding to the allocated resource.
         """
@@ -1300,7 +1405,7 @@ class InfrahubClient(BaseClient):
             raise ValueError("resource_pool is not an IP address pool")
 
         branch = branch or self.default_branch
-        mutation_name = "IPAddressPoolGetResource"
+        mutation_name = "InfrahubIPAddressPoolGetResource"
 
         query = self._build_ip_address_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -1367,7 +1472,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> SchemaType: ...
 
     @overload
@@ -1415,7 +1520,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> CoreNode | None: ...
 
     async def allocate_next_ip_prefix(
@@ -1430,7 +1535,7 @@ class InfrahubClient(BaseClient):
         branch: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> CoreNode | SchemaType | None:
         """Allocate a new IP prefix by using the provided resource pool.
 
@@ -1444,7 +1549,7 @@ class InfrahubClient(BaseClient):
             branch: Name of the branch to allocate from. Defaults to default_branch.
             timeout: Flag to indicate whether to populate the store with the retrieved nodes.
             tracker: The offset for pagination.
-            raise_for_error: The limit for pagination.
+            raise_for_error (bool, optional): Deprecated, raise an error if the HTTP status is not 2XX.
         Returns:
             InfrahubNode: Node corresponding to the allocated resource.
         """
@@ -1452,7 +1557,7 @@ class InfrahubClient(BaseClient):
             raise ValueError("resource_pool is not an IP prefix pool")
 
         branch = branch or self.default_branch
-        mutation_name = "IPPrefixPoolGetResource"
+        mutation_name = "InfrahubIPPrefixPoolGetResource"
 
         query = self._build_ip_prefix_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -1463,7 +1568,11 @@ class InfrahubClient(BaseClient):
             data=data,
         )
         response = await self.execute_graphql(
-            query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error
+            query=query.render(),
+            branch_name=branch,
+            timeout=timeout,
+            tracker=tracker,
+            raise_for_error=raise_for_error,
         )
 
         if response[mutation_name]["ok"]:
@@ -1472,10 +1581,14 @@ class InfrahubClient(BaseClient):
         return None
 
     async def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch:
-        return InfrahubBatch(semaphore=self.concurrent_execution_limit, return_exceptions=return_exceptions)
+        return InfrahubBatch(
+            max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions
+        )
 
     async def get_list_repositories(
-        self, branches: dict[str, BranchData] | None = None, kind: str = "CoreGenericRepository"
+        self,
+        branches: dict[str, BranchData] | None = None,
+        kind: str = "CoreGenericRepository",
    ) -> dict[str, RepositoryData]:
         branches = branches or await self.branch.all()
 
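create_batch() no longer hands every batch the single client-wide asyncio.Semaphore; each InfrahubBatch now receives max_concurrent_execution and appears to manage its own concurrency limit. The calling pattern is unchanged; a short reminder, with the kind BuiltinTag used purely as an example:

    # Unchanged calling pattern for batches after this refactor.
    from infrahub_sdk import InfrahubClient


    async def create_tags(client: InfrahubClient, names: list[str]) -> None:
        batch = await client.create_batch()
        for name in names:
            tag = await client.create(kind="BuiltinTag", name=name)
            batch.add(task=tag.save, node=tag)
        async for node, _ in batch.execute():
            print(f"saved {node.id}")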
@@ -1513,7 +1626,11 @@ class InfrahubClient(BaseClient):
         return repositories
 
     async def repository_update_commit(
-        self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False
+        self,
+        branch_name: str,
+        repository_id: str,
+        commit: str,
+        is_read_only: bool = False,
     ) -> bool:
         variables = {"repository_id": str(repository_id), "commit": str(commit)}
         await self.execute_graphql(
@@ -1539,6 +1656,38 @@ class InfrahubClient(BaseClient):
 
         self.mode = InfrahubClientMode.DEFAULT
 
+    async def convert_object_type(
+        self,
+        node_id: str,
+        target_kind: str,
+        branch: str | None = None,
+        fields_mapping: dict[str, ConversionFieldInput] | None = None,
+    ) -> InfrahubNode:
+        """
+        Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names
+        and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+        in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+        for more information.
+        """
+
+        if fields_mapping is None:
+            mapping_dict = {}
+        else:
+            mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+
+        branch_name = branch or self.default_branch
+        response = await self.execute_graphql(
+            query=CONVERT_OBJECT_MUTATION,
+            variables={
+                "node_id": node_id,
+                "fields_mapping": mapping_dict,
+                "target_kind": target_kind,
+            },
+            branch_name=branch_name,
+            raise_for_error=True,
+        )
+        return await InfrahubNode.from_graphql(client=self, branch=branch_name, data=response["ConvertObjectType"])
+
 
 class InfrahubClientSync(BaseClient):
     schema: InfrahubSchemaSync
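A hypothetical call to the new convert_object_type() helper; the node id, kinds, and branch below are placeholders, and ConversionFieldInput comes from the new infrahub_sdk/convert_object_type.py module listed above:

    # Placeholder example: convert an existing node to another kind.
    from infrahub_sdk import InfrahubClient


    async def convert_node(client: InfrahubClient) -> None:
        node = await client.convert_object_type(
            node_id="c0ffee00-0000-0000-0000-000000000000",
            target_kind="InfraSwitch",
            branch="main",
            # fields_mapping maps target field names to ConversionFieldInput
            # models for mandatory fields that have no equivalent on the
            # source kind (see the docstring and the linked guide).
        )
        print(node.id)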
@@ -1560,13 +1709,11 @@ class InfrahubClientSync(BaseClient):
     def get_version(self) -> str:
         """Return the Infrahub version."""
         response = self.execute_graphql(query="query { InfrahubInfo { version }}")
-        version = response.get("InfrahubInfo", {}).get("version", "")
-        return version
+        return response.get("InfrahubInfo", {}).get("version", "")
 
     def get_user(self) -> dict:
         """Return user information"""
-        user_info = self.execute_graphql(query=QUERY_USER)
-        return user_info
+        return self.execute_graphql(query=QUERY_USER)
 
     def get_user_permissions(self) -> dict:
         """Return user permissions"""
@@ -1625,7 +1772,7 @@ class InfrahubClientSync(BaseClient):
         branch_name: str | None = None,
         at: str | Timestamp | None = None,
         timeout: int | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
         tracker: str | None = None,
     ) -> dict:
         """Execute a GraphQL query (or mutation).
@@ -1637,13 +1784,18 @@ class InfrahubClientSync(BaseClient):
             branch_name (str, optional): Name of the branch on which the query will be executed. Defaults to None.
             at (str, optional): Time when the query should be executed. Defaults to None.
             timeout (int, optional): Timeout in second for the query. Defaults to None.
-            raise_for_error (bool, optional): Flag to indicate that we need to raise an exception if the response has some errors. Defaults to True.
+            raise_for_error (bool | None, optional): Deprecated. Controls only HTTP status handling.
+                - None (default) or True: HTTP errors raise via `resp.raise_for_status()`.
+                - False: HTTP errors are not automatically raised.
+                GraphQL errors always raise `GraphQLError`. Defaults to None.
+
         Raises:
-            GraphQLError: When an error occurs during the execution of the GraphQL query or mutation.
+            GraphQLError: When the GraphQL response contains errors.
 
         Returns:
-            dict: The result of the GraphQL query or mutation.
+            dict: The GraphQL data payload (`response["data"]`).
         """
+        raise_for_error_deprecation_warning(value=raise_for_error)
 
         branch_name = branch_name or self.default_branch
         url = self._graphql_url(branch_name=branch_name, at=at)
@@ -1666,7 +1818,7 @@ class InfrahubClientSync(BaseClient):
             try:
                 resp = self._post(url=url, payload=payload, headers=headers, timeout=timeout)
 
-                if raise_for_error:
+                if raise_for_error in (None, True):
                     resp.raise_for_status()
 
                 retry = False
@@ -1794,7 +1946,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1831,6 +1983,7 @@ class InfrahubClientSync(BaseClient):
         schema_kind: str,
         branch: str,
         prefetch_relationships: bool,
+        include: list[str] | None,
         timeout: int | None = None,
     ) -> ProcessRelationsNodeSync:
         """Processes InfrahubNodeSync and their Relationships from the GraphQL query response.
@@ -1840,7 +1993,7 @@ class InfrahubClientSync(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
 
         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -1855,8 +2008,13 @@ class InfrahubClientSync(BaseClient):
             node = InfrahubNodeSync.from_graphql(client=self, branch=branch, data=item, timeout=timeout)
             nodes.append(node)
 
-            if prefetch_relationships:
-                node._process_relationships(node_data=item, branch=branch, related_nodes=related_nodes, timeout=timeout)
+            if prefetch_relationships or (include and any(rel in include for rel in node._relationships)):
+                node._process_relationships(
+                    node_data=item,
+                    branch=branch,
+                    related_nodes=related_nodes,
+                    timeout=timeout,
+                )
 
         return ProcessRelationsNodeSync(nodes=nodes, related_nodes=related_nodes)
 
@@ -1927,7 +2085,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1980,6 +2138,7 @@ class InfrahubClientSync(BaseClient):
             branch=branch,
             prefetch_relationships=prefetch_relationships,
             timeout=timeout,
+            include=include,
         )
         return response, process_result
 
@@ -2217,11 +2376,14 @@ class InfrahubClientSync(BaseClient):
         batch to manipulate objects that depend on each others.
         """
         return InfrahubBatchSync(
-            max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions
+            max_concurrent_execution=self.max_concurrent_execution,
+            return_exceptions=return_exceptions,
        )
 
     def get_list_repositories(
-        self, branches: dict[str, BranchData] | None = None, kind: str = "CoreGenericRepository"
+        self,
+        branches: dict[str, BranchData] | None = None,
+        kind: str = "CoreGenericRepository",
     ) -> dict[str, RepositoryData]:
         raise NotImplementedError(
             "This method is deprecated in the async client and won't be implemented in the sync client."
@@ -2238,17 +2400,19 @@ class InfrahubClientSync(BaseClient):
         at: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> dict:
+        raise_for_error_deprecation_warning(value=raise_for_error)
+
         url = f"{self.address}/api/query/{name}"
         url_params = copy.deepcopy(params or {})
+        url_params["branch"] = branch_name or self.default_branch
+
         headers = copy.copy(self.headers or {})
 
         if self.insert_tracker and tracker:
             headers["X-Infrahub-Tracker"] = tracker
 
-        if branch_name:
-            url_params["branch"] = branch_name
         if at:
             url_params["at"] = at
         if subscribers:
@@ -2283,26 +2447,67 @@ class InfrahubClientSync(BaseClient):
             timeout=timeout or self.default_timeout,
         )
 
-        if raise_for_error:
+        if raise_for_error in (None, True):
             resp.raise_for_status()
 
         return decode_json(response=resp)
 
+    def create_diff(
+        self,
+        branch: str,
+        name: str,
+        from_time: datetime,
+        to_time: datetime,
+        wait_until_completion: bool = True,
+    ) -> bool | str:
+        if from_time > to_time:
+            raise ValueError("from_time must be <= to_time")
+        input_data = {
+            "wait_until_completion": wait_until_completion,
+            "data": {
+                "name": name,
+                "branch": branch,
+                "from_time": from_time.isoformat(),
+                "to_time": to_time.isoformat(),
+            },
+        }
+
+        mutation_query = MUTATION_QUERY_TASK if not wait_until_completion else {"ok": None}
+        query = Mutation(mutation="DiffUpdate", input_data=input_data, query=mutation_query)
+        response = self.execute_graphql(query=query.render(), tracker="mutation-diff-update")
+
+        if not wait_until_completion and "task" in response["DiffUpdate"]:
+            return response["DiffUpdate"]["task"]["id"]
+
+        return response["DiffUpdate"]["ok"]
+
     def get_diff_summary(
         self,
         branch: str,
+        name: str | None = None,
+        from_time: datetime | None = None,
+        to_time: datetime | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> list[NodeDiff]:
         query = get_diff_summary_query()
+        input_data = {"branch_name": branch}
+        if name:
+            input_data["name"] = name
+        if from_time and to_time and from_time > to_time:
+            raise ValueError("from_time must be <= to_time")
+        if from_time:
+            input_data["from_time"] = from_time.isoformat()
+        if to_time:
+            input_data["to_time"] = to_time.isoformat()
         response = self.execute_graphql(
             query=query,
             branch_name=branch,
             timeout=timeout,
             tracker=tracker,
             raise_for_error=raise_for_error,
-            variables={"branch_name": branch},
+            variables=input_data,
        )
 
         node_diffs: list[NodeDiff] = []
@@ -2358,7 +2563,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> SchemaTypeSync: ...
 
     @overload
@@ -2403,7 +2608,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> CoreNodeSync | None: ...
 
     def allocate_next_ip_address(
@@ -2417,7 +2622,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> CoreNodeSync | SchemaTypeSync | None:
         """Allocate a new IP address by using the provided resource pool.
 
@@ -2438,7 +2643,7 @@ class InfrahubClientSync(BaseClient):
             raise ValueError("resource_pool is not an IP address pool")
 
         branch = branch or self.default_branch
-        mutation_name = "IPAddressPoolGetResource"
+        mutation_name = "InfrahubIPAddressPoolGetResource"
 
         query = self._build_ip_address_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -2448,7 +2653,11 @@ class InfrahubClientSync(BaseClient):
             data=data,
         )
         response = self.execute_graphql(
-            query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error
+            query=query.render(),
+            branch_name=branch,
+            timeout=timeout,
+            tracker=tracker,
+            raise_for_error=raise_for_error,
         )
 
         if response[mutation_name]["ok"]:
@@ -2501,7 +2710,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> SchemaTypeSync: ...
 
     @overload
@@ -2549,7 +2758,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = ...,
         timeout: int | None = ...,
         tracker: str | None = ...,
-        raise_for_error: bool = ...,
+        raise_for_error: bool | None = ...,
     ) -> CoreNodeSync | None: ...
 
     def allocate_next_ip_prefix(
@@ -2564,7 +2773,7 @@ class InfrahubClientSync(BaseClient):
         branch: str | None = None,
         timeout: int | None = None,
         tracker: str | None = None,
-        raise_for_error: bool = True,
+        raise_for_error: bool | None = None,
     ) -> CoreNodeSync | SchemaTypeSync | None:
         """Allocate a new IP prefix by using the provided resource pool.
 
@@ -2586,7 +2795,7 @@ class InfrahubClientSync(BaseClient):
             raise ValueError("resource_pool is not an IP prefix pool")
 
         branch = branch or self.default_branch
-        mutation_name = "IPPrefixPoolGetResource"
+        mutation_name = "InfrahubIPPrefixPoolGetResource"
 
         query = self._build_ip_prefix_allocation_query(
             resource_pool_id=resource_pool.id,
@@ -2597,7 +2806,11 @@ class InfrahubClientSync(BaseClient):
             data=data,
         )
         response = self.execute_graphql(
-            query=query.render(), branch_name=branch, timeout=timeout, tracker=tracker, raise_for_error=raise_for_error
+            query=query.render(),
+            branch_name=branch,
+            timeout=timeout,
+            tracker=tracker,
+            raise_for_error=raise_for_error,
         )
 
         if response[mutation_name]["ok"]:
@@ -2606,7 +2819,11 @@ class InfrahubClientSync(BaseClient):
         return None
 
     def repository_update_commit(
-        self, branch_name: str, repository_id: str, commit: str, is_read_only: bool = False
+        self,
+        branch_name: str,
+        repository_id: str,
+        commit: str,
+        is_read_only: bool = False,
     ) -> bool:
         raise NotImplementedError(
             "This method is deprecated in the async client and won't be implemented in the sync client."
@@ -2626,10 +2843,21 @@ class InfrahubClientSync(BaseClient):
         base_headers = copy.copy(self.headers or {})
         headers.update(base_headers)
 
-        return self._request(url=url, method=HTTPMethod.GET, headers=headers, timeout=timeout or self.default_timeout)
+        return self._request(
+            url=url,
+            method=HTTPMethod.GET,
+            headers=headers,
+            timeout=timeout or self.default_timeout,
+        )
 
     @handle_relogin_sync
-    def _post(self, url: str, payload: dict, headers: dict | None = None, timeout: int | None = None) -> httpx.Response:
+    def _post(
+        self,
+        url: str,
+        payload: dict,
+        headers: dict | None = None,
+        timeout: int | None = None,
+    ) -> httpx.Response:
         """Execute a HTTP POST with HTTPX.
 
         Raises:
@@ -2643,24 +2871,39 @@ class InfrahubClientSync(BaseClient):
         headers.update(base_headers)
 
         return self._request(
-            url=url, method=HTTPMethod.POST, payload=payload, headers=headers, timeout=timeout or self.default_timeout
+            url=url,
+            method=HTTPMethod.POST,
+            payload=payload,
+            headers=headers,
+            timeout=timeout or self.default_timeout,
         )
 
     def _request(
-        self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: dict | None = None
+        self,
+        url: str,
+        method: HTTPMethod,
+        headers: dict[str, Any],
+        timeout: int,
+        payload: dict | None = None,
     ) -> httpx.Response:
         response = self._request_method(url=url, method=method, headers=headers, timeout=timeout, payload=payload)
         self._record(response)
         return response
 
     def _default_request_method(
-        self, url: str, method: HTTPMethod, headers: dict[str, Any], timeout: int, payload: dict | None = None
+        self,
+        url: str,
+        method: HTTPMethod,
+        headers: dict[str, Any],
+        timeout: int,
+        payload: dict | None = None,
     ) -> httpx.Response:
         params: dict[str, Any] = {}
         if payload:
             params["json"] = payload
 
-        proxy_config: dict[str, str | dict[str, httpx.HTTPTransport]] = {}
+        proxy_config: ProxyConfigSync = {"proxy": None, "mounts": None}
+
         if self.config.proxy:
             proxy_config["proxy"] = self.config.proxy
         elif self.config.proxy_mounts.is_set:
@@ -2670,8 +2913,8 @@ class InfrahubClientSync(BaseClient):
             }
 
         with httpx.Client(
-            **proxy_config,  # type: ignore[arg-type]
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            **proxy_config,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = client.request(
@@ -2696,7 +2939,10 @@ class InfrahubClientSync(BaseClient):
         response = self._request(
             url=url,
             method=HTTPMethod.POST,
-            headers={"content-type": "application/json", "Authorization": f"Bearer {self.refresh_token}"},
+            headers={
+                "content-type": "application/json",
+                "Authorization": f"Bearer {self.refresh_token}",
+            },
             timeout=self.default_timeout,
         )
 
@@ -2729,7 +2975,10 @@ class InfrahubClientSync(BaseClient):
         response = self._request(
             url=url,
             method=HTTPMethod.POST,
-            payload={"username": self.config.username, "password": self.config.password},
+            payload={
+                "username": self.config.username,
+                "password": self.config.password,
+            },
             headers={"content-type": "application/json"},
             timeout=self.default_timeout,
         )
@@ -2753,3 +3002,35 @@ class InfrahubClientSync(BaseClient):
         self.group_context.update_group()
 
         self.mode = InfrahubClientMode.DEFAULT
+
+    def convert_object_type(
+        self,
+        node_id: str,
+        target_kind: str,
+        branch: str | None = None,
+        fields_mapping: dict[str, ConversionFieldInput] | None = None,
+    ) -> InfrahubNodeSync:
+        """
+        Convert a given node to another kind on a given branch. `fields_mapping` keys are target fields names
+        and its values indicate how to fill in these fields. Any mandatory field not having an equivalent field
+        in the source kind should be specified in this mapping. See https://docs.infrahub.app/guides/object-convert-type
+        for more information.
+        """
+
+        if fields_mapping is None:
+            mapping_dict = {}
+        else:
+            mapping_dict = {field_name: model.model_dump(mode="json") for field_name, model in fields_mapping.items()}
+
+        branch_name = branch or self.default_branch
+        response = self.execute_graphql(
+            query=CONVERT_OBJECT_MUTATION,
+            variables={
+                "node_id": node_id,
+                "fields_mapping": mapping_dict,
+                "target_kind": target_kind,
+            },
+            branch_name=branch_name,
+            raise_for_error=True,
+        )
+        return InfrahubNodeSync.from_graphql(client=self, branch=branch_name, data=response["ConvertObjectType"])