infrahub-server 1.5.0b2__py3-none-any.whl → 1.5.1__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
Files changed (75)
  1. infrahub/api/dependencies.py +4 -13
  2. infrahub/api/transformation.py +22 -20
  3. infrahub/cli/db.py +87 -65
  4. infrahub/cli/upgrade.py +27 -7
  5. infrahub/core/diff/calculator.py +2 -2
  6. infrahub/core/diff/query/delete_query.py +9 -5
  7. infrahub/core/diff/query/merge.py +39 -23
  8. infrahub/core/graph/__init__.py +1 -1
  9. infrahub/core/migrations/graph/__init__.py +5 -3
  10. infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
  11. infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
  12. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
  13. infrahub/core/migrations/graph/{m041_profile_attrs_in_db.py → m042_profile_attrs_in_db.py} +10 -8
  14. infrahub/core/migrations/graph/{m042_create_hfid_display_label_in_db.py → m043_create_hfid_display_label_in_db.py} +6 -6
  15. infrahub/core/migrations/graph/{m043_backfill_hfid_display_label_in_db.py → m044_backfill_hfid_display_label_in_db.py} +9 -11
  16. infrahub/core/migrations/shared.py +14 -0
  17. infrahub/core/models.py +2 -2
  18. infrahub/core/node/__init__.py +26 -1
  19. infrahub/core/query/diff.py +61 -16
  20. infrahub/core/query/ipam.py +15 -4
  21. infrahub/core/query/node.py +42 -40
  22. infrahub/core/relationship/model.py +10 -5
  23. infrahub/core/schema/definitions/core/check.py +1 -1
  24. infrahub/core/schema/definitions/core/transform.py +1 -1
  25. infrahub/core/schema/schema_branch_display.py +12 -0
  26. infrahub/core/schema/schema_branch_hfid.py +6 -0
  27. infrahub/core/validators/uniqueness/checker.py +2 -1
  28. infrahub/database/__init__.py +0 -13
  29. infrahub/graphql/analyzer.py +9 -0
  30. infrahub/graphql/mutations/branch.py +5 -0
  31. infrahub/graphql/mutations/proposed_change.py +6 -0
  32. infrahub/message_bus/types.py +1 -0
  33. infrahub/profiles/queries/get_profile_data.py +4 -5
  34. infrahub/proposed_change/tasks.py +43 -9
  35. infrahub_sdk/analyzer.py +1 -1
  36. infrahub_sdk/batch.py +2 -2
  37. infrahub_sdk/branch.py +14 -2
  38. infrahub_sdk/checks.py +1 -1
  39. infrahub_sdk/client.py +2 -4
  40. infrahub_sdk/ctl/branch.py +3 -0
  41. infrahub_sdk/ctl/cli_commands.py +2 -0
  42. infrahub_sdk/ctl/exceptions.py +1 -1
  43. infrahub_sdk/ctl/task.py +110 -0
  44. infrahub_sdk/exceptions.py +18 -18
  45. infrahub_sdk/graphql/query.py +2 -2
  46. infrahub_sdk/node/attribute.py +1 -1
  47. infrahub_sdk/node/property.py +1 -1
  48. infrahub_sdk/node/related_node.py +3 -3
  49. infrahub_sdk/node/relationship.py +4 -6
  50. infrahub_sdk/object_store.py +2 -2
  51. infrahub_sdk/operation.py +1 -1
  52. infrahub_sdk/protocols_generator/generator.py +1 -1
  53. infrahub_sdk/pytest_plugin/exceptions.py +9 -9
  54. infrahub_sdk/pytest_plugin/items/base.py +1 -1
  55. infrahub_sdk/pytest_plugin/items/check.py +1 -1
  56. infrahub_sdk/pytest_plugin/items/python_transform.py +1 -1
  57. infrahub_sdk/repository.py +1 -1
  58. infrahub_sdk/schema/__init__.py +1 -1
  59. infrahub_sdk/spec/object.py +7 -3
  60. infrahub_sdk/task/exceptions.py +4 -4
  61. infrahub_sdk/task/manager.py +2 -2
  62. infrahub_sdk/task/models.py +6 -4
  63. infrahub_sdk/timestamp.py +1 -1
  64. infrahub_sdk/transfer/exporter/json.py +1 -1
  65. infrahub_sdk/transfer/importer/json.py +1 -1
  66. infrahub_sdk/transforms.py +1 -1
  67. {infrahub_server-1.5.0b2.dist-info → infrahub_server-1.5.1.dist-info}/METADATA +2 -2
  68. {infrahub_server-1.5.0b2.dist-info → infrahub_server-1.5.1.dist-info}/RECORD +75 -73
  69. infrahub_testcontainers/container.py +31 -5
  70. infrahub_testcontainers/helpers.py +19 -4
  71. infrahub_testcontainers/models.py +8 -6
  72. infrahub_testcontainers/performance_test.py +6 -4
  73. {infrahub_server-1.5.0b2.dist-info → infrahub_server-1.5.1.dist-info}/LICENSE.txt +0 -0
  74. {infrahub_server-1.5.0b2.dist-info → infrahub_server-1.5.1.dist-info}/WHEEL +0 -0
  75. {infrahub_server-1.5.0b2.dist-info → infrahub_server-1.5.1.dist-info}/entry_points.txt +0 -0
@@ -11,6 +11,7 @@ import pytest
  from infrahub_sdk.exceptions import ModuleImportError, NodeNotFoundError, URLNotFoundError
  from infrahub_sdk.node import InfrahubNode
  from infrahub_sdk.protocols import (
+ CoreArtifactDefinition,
  CoreArtifactValidator,
  CoreGeneratorDefinition,
  CoreGeneratorValidator,
@@ -44,7 +45,7 @@ from infrahub.core.diff.model.diff import DiffElementType, SchemaConflict
  from infrahub.core.diff.model.path import NodeDiffFieldSummary
  from infrahub.core.integrity.object_conflict.conflict_recorder import ObjectConflictValidatorRecorder
  from infrahub.core.manager import NodeManager
- from infrahub.core.protocols import CoreArtifactDefinition, CoreDataCheck, CoreValidator
+ from infrahub.core.protocols import CoreDataCheck, CoreValidator
  from infrahub.core.protocols import CoreProposedChange as InternalCoreProposedChange
  from infrahub.core.timestamp import Timestamp
  from infrahub.core.validators.checks_runner import run_checks_and_update_validator
@@ -59,6 +60,8 @@ from infrahub.git.base import extract_repo_file_information
  from infrahub.git.models import TriggerRepositoryInternalChecks, TriggerRepositoryUserChecks
  from infrahub.git.repository import InfrahubRepository, get_initialized_repo
  from infrahub.git.utils import fetch_artifact_definition_targets, fetch_proposed_change_generator_definition_targets
+ from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer
+ from infrahub.graphql.initialization import prepare_graphql_params
  from infrahub.log import get_logger
  from infrahub.message_bus.types import (
  ProposedChangeArtifactDefinition,
@@ -672,6 +675,27 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
  repository = model.branch_diff.get_repository(repository_id=model.artifact_definition.repository_id)
  impacted_artifacts = model.branch_diff.get_subscribers_ids(kind=InfrahubKind.ARTIFACT)

+ source_schema_branch = registry.schema.get_schema_branch(name=model.source_branch)
+ source_branch = registry.get_branch_from_registry(branch=model.source_branch)
+
+ graphql_params = await prepare_graphql_params(db=await get_database(), branch=model.source_branch)
+ query_analyzer = InfrahubGraphQLQueryAnalyzer(
+ query=model.artifact_definition.query_payload,
+ branch=source_branch,
+ schema_branch=source_schema_branch,
+ schema=graphql_params.schema,
+ )
+
+ only_has_unique_targets = query_analyzer.query_report.only_has_unique_targets
+ if not only_has_unique_targets:
+ log.warning(
+ f"Artifact definition {artifact_definition.name.value} query does not guarantee unique targets. All targets will be processed."
+ )
+
+ managed_branch = model.source_branch_sync_with_git and model.branch_diff.has_file_modifications
+ if managed_branch:
+ log.info("Source branch is synced with Git repositories with updates, all artifacts will be processed")
+
  checks = []

  for relationship in group.members.peers:
@@ -679,8 +703,9 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
  artifact_id = artifacts_by_member.get(member.id)
  if _should_render_artifact(
  artifact_id=artifact_id,
- managed_branch=model.source_branch_sync_with_git,
+ managed_branch=managed_branch,
  impacted_artifacts=impacted_artifacts,
+ only_has_unique_targets=only_has_unique_targets,
  ):
  log.info(f"Trigger Artifact processing for {member.display_label}")

@@ -726,21 +751,26 @@ async def validate_artifacts_generation(model: RequestArtifactDefinitionCheck, c
  )


- def _should_render_artifact(artifact_id: str | None, managed_branch: bool, impacted_artifacts: list[str]) -> bool: # noqa: ARG001
+ def _should_render_artifact(
+ artifact_id: str | None,
+ managed_branch: bool,
+ impacted_artifacts: list[str],
+ only_has_unique_targets: bool,
+ ) -> bool:
  """Returns a boolean to indicate if an artifact should be generated or not.
  Will return true if:
  * The artifact_id wasn't set which could be that it's a new object that doesn't have a previous artifact
- * The source brance is not data only which would indicate that it could contain updates in git to the transform
+ * The source branch is not data only which would indicate that it could contain updates in git to the transform
  * The artifact_id exists in the impacted_artifacts list
+ * The query fails the only_has_unique_targets check
  Will return false if:
  * The source branch is a data only branch and the artifact_id exists and is not in the impacted list
  """

- # if not artifact_id or managed_branch:
- # return True
- # return artifact_id in impacted_artifacts
- # Temporary workaround tracked in https://github.com/opsmill/infrahub/issues/4991
- return True
+ if not only_has_unique_targets or not artifact_id or managed_branch:
+ return True
+
+ return artifact_id in impacted_artifacts


  @flow(
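The hunk above replaces the temporary "always render" workaround with a real decision. A minimal standalone sketch of that decision logic, with example calls (a hypothetical helper that mirrors the hunk; not the packaged function itself):

    def should_render_artifact(
        artifact_id: str | None,
        managed_branch: bool,
        impacted_artifacts: list[str],
        only_has_unique_targets: bool,
    ) -> bool:
        # Re-render everything when targets are not guaranteed unique, when no
        # previous artifact exists, or when the source branch carries git updates.
        if not only_has_unique_targets or not artifact_id or managed_branch:
            return True
        # Otherwise only re-render artifacts that the diff actually impacted.
        return artifact_id in impacted_artifacts

    assert should_render_artifact(None, False, [], True) is True        # new object, no prior artifact
    assert should_render_artifact("abc", False, ["abc"], True) is True  # impacted by the diff
    assert should_render_artifact("abc", False, ["xyz"], True) is False # untouched, skipped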
@@ -1262,6 +1292,9 @@ query GatherArtifactDefinitions {
  name {
  value
  }
+ query {
+ value
+ }
  }
  }
  ... on CoreTransformJinja2 {
@@ -1479,6 +1512,7 @@ def _parse_artifact_definitions(definitions: list[dict]) -> list[ProposedChangeA
  query_name=definition["node"]["transformation"]["node"]["query"]["node"]["name"]["value"],
  query_id=definition["node"]["transformation"]["node"]["query"]["node"]["id"],
  query_models=definition["node"]["transformation"]["node"]["query"]["node"]["models"]["value"] or [],
+ query_payload=definition["node"]["transformation"]["node"]["query"]["node"]["query"]["value"],
  repository_id=definition["node"]["transformation"]["node"]["repository"]["node"]["id"],
  transform_kind=definition["node"]["transformation"]["node"]["__typename"],
  )
infrahub_sdk/analyzer.py CHANGED
@@ -30,7 +30,7 @@ class GraphQLOperation(BaseModel):


  class GraphQLQueryAnalyzer:
- def __init__(self, query: str, schema: GraphQLSchema | None = None):
+ def __init__(self, query: str, schema: GraphQLSchema | None = None) -> None:
  self.query: str = query
  self.schema: GraphQLSchema | None = schema
  self.document: DocumentNode = parse(self.query)
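The only change here is the explicit return annotation; behaviour is unchanged. A minimal usage sketch, assuming nothing beyond what the hunk shows (the query string is parsed into a graphql-core DocumentNode and the schema argument stays optional; the example query is arbitrary):

    from infrahub_sdk.analyzer import GraphQLQueryAnalyzer

    # Any parseable GraphQL document works; no schema is required just to parse it.
    analyzer = GraphQLQueryAnalyzer(query="query { CoreRepository { edges { node { id } } } }")
    print(type(analyzer.document))  # graphql.language.ast.DocumentNode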
infrahub_sdk/batch.py CHANGED
@@ -58,7 +58,7 @@ class InfrahubBatch:
  semaphore: asyncio.Semaphore | None = None,
  max_concurrent_execution: int = 5,
  return_exceptions: bool = False,
- ):
+ ) -> None:
  self._tasks: list[BatchTask] = []
  self.semaphore = semaphore or asyncio.Semaphore(value=max_concurrent_execution)
  self.return_exceptions = return_exceptions
@@ -90,7 +90,7 @@ class InfrahubBatch:


  class InfrahubBatchSync:
- def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False):
+ def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False) -> None:
  self._tasks: list[BatchTaskSync] = []
  self.max_concurrent_execution = max_concurrent_execution
  self.return_exceptions = return_exceptions
infrahub_sdk/branch.py CHANGED
@@ -1,6 +1,7 @@
  from __future__ import annotations

  import warnings
+ from enum import Enum
  from typing import TYPE_CHECKING, Any, Literal, overload
  from urllib.parse import urlencode

@@ -14,6 +15,13 @@ if TYPE_CHECKING:
  from .client import InfrahubClient, InfrahubClientSync


+ class BranchStatus(str, Enum):
+ OPEN = "OPEN"
+ NEED_REBASE = "NEED_REBASE"
+ NEED_UPGRADE_REBASE = "NEED_UPGRADE_REBASE"
+ DELETING = "DELETING"
+
+
  class BranchData(BaseModel):
  id: str
  name: str
@@ -21,6 +29,8 @@ class BranchData(BaseModel):
  sync_with_git: bool
  is_default: bool
  has_schema_changes: bool
+ graph_version: int | None = None
+ status: BranchStatus = BranchStatus.OPEN
  origin_branch: str | None = None
  branched_from: str

@@ -34,6 +44,8 @@ BRANCH_DATA = {
  "is_default": None,
  "sync_with_git": None,
  "has_schema_changes": None,
+ "graph_version": None,
+ "status": None,
  }

  BRANCH_DATA_FILTER = {"@filters": {"name": "$branch_name"}}
@@ -71,7 +83,7 @@ class InfraHubBranchManagerBase:


  class InfrahubBranchManager(InfraHubBranchManagerBase):
- def __init__(self, client: InfrahubClient):
+ def __init__(self, client: InfrahubClient) -> None:
  self.client = client

  @overload
@@ -221,7 +233,7 @@ class InfrahubBranchManager(InfraHubBranchManagerBase):


  class InfrahubBranchManagerSync(InfraHubBranchManagerBase):
- def __init__(self, client: InfrahubClientSync):
+ def __init__(self, client: InfrahubClientSync) -> None:
  self.client = client

  def all(self) -> dict[str, BranchData]:
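BranchData now carries graph_version and a status backed by the new BranchStatus enum. A short sketch of reading them through the branch manager (it assumes the async manager exposes the same all() -> dict[str, BranchData] helper shown above for the sync manager; client setup is elided):

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.branch import BranchStatus

    async def report_branches(client: InfrahubClient) -> None:
        branches = await client.branch.all()  # mapping of branch name -> BranchData
        for name, branch in branches.items():
            # status defaults to BranchStatus.OPEN when the server does not report one
            if branch.status is not BranchStatus.OPEN:
                print(f"{name}: status={branch.status.value}, graph_version={branch.graph_version}")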
infrahub_sdk/checks.py CHANGED
@@ -43,7 +43,7 @@ class InfrahubCheck:
  initializer: InfrahubCheckInitializer | None = None,
  params: dict | None = None,
  client: InfrahubClient | None = None,
- ):
+ ) -> None:
  self.git: GitRepoManager | None = None
  self.initializer = initializer or InfrahubCheckInitializer()

infrahub_sdk/client.py CHANGED
@@ -140,7 +140,7 @@ class BaseClient:
  self,
  address: str = "",
  config: Config | dict[str, Any] | None = None,
- ):
+ ) -> None:
  self.client = None
  self.headers = {"content-type": "application/json"}
  self.access_token: str = ""
@@ -327,7 +327,6 @@ class InfrahubClient(BaseClient):
  self.object_store = ObjectStore(self)
  self.store = NodeStore(default_branch=self.default_branch)
  self.task = InfrahubTaskManager(self)
- self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution)
  self._request_method: AsyncRequester = self.config.requester or self._default_request_method
  self.group_context = InfrahubGroupContext(self)

@@ -1583,8 +1582,7 @@ class InfrahubClient(BaseClient):

  async def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch:
  return InfrahubBatch(
- semaphore=self.concurrent_execution_limit,
- return_exceptions=return_exceptions,
+ max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions
  )

  async def get_list_repositories(
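With the client-wide semaphore removed, each create_batch() call now builds an independent InfrahubBatch that sizes its own semaphore from max_concurrent_execution. A hedged usage sketch (the add()/execute() calls follow the batch API as it is used elsewhere in this diff; the "InfraDevice" kind is just an illustrative example):

    from infrahub_sdk import InfrahubClient

    async def fetch_nodes(client: InfrahubClient, ids: list[str]) -> None:
        # Each batch now owns its own asyncio.Semaphore instead of sharing one per client.
        batch = await client.create_batch(return_exceptions=True)
        for node_id in ids:
            batch.add(task=client.get, kind="InfraDevice", id=node_id)
        async for _, result in batch.execute():
            print(result)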
infrahub_sdk/ctl/branch.py CHANGED
@@ -46,6 +46,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
  table.add_column("Sync with Git")
  table.add_column("Has Schema Changes")
  table.add_column("Is Default")
+ table.add_column("Status")

  # identify the default branch and always print it first
  default_branch = [branch for branch in branches.values() if branch.is_default][0]
@@ -57,6 +58,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
  "[green]True" if default_branch.sync_with_git else "[#FF7F50]False",
  "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
  "[green]True" if default_branch.is_default else "[#FF7F50]False",
+ default_branch.status,
  )

  for branch in branches.values():
@@ -71,6 +73,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
  "[green]True" if branch.sync_with_git else "[#FF7F50]False",
  "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
  "[green]True" if branch.is_default else "[#FF7F50]False",
+ branch.status,
  )

  console.print(table)
infrahub_sdk/ctl/cli_commands.py CHANGED
@@ -32,6 +32,7 @@ from ..ctl.render import list_jinja2_transforms, print_template_errors
  from ..ctl.repository import app as repository_app
  from ..ctl.repository import find_repository_config_file, get_repository_config
  from ..ctl.schema import app as schema_app
+ from ..ctl.task import app as task_app
  from ..ctl.transform import list_transforms
  from ..ctl.utils import (
  catch_exception,
@@ -64,6 +65,7 @@ app.add_typer(repository_app, name="repository")
  app.add_typer(menu_app, name="menu")
  app.add_typer(object_app, name="object")
  app.add_typer(graphql_app, name="graphql")
+ app.add_typer(task_app, name="task")

  app.command(name="dump")(dump)
  app.command(name="load")(load)
infrahub_sdk/ctl/exceptions.py CHANGED
@@ -3,6 +3,6 @@ class Error(Exception):


  class QueryNotFoundError(Error):
- def __init__(self, name: str, message: str = ""):
+ def __init__(self, name: str, message: str = "") -> None:
  self.message = message or f"The requested query '{name}' was not found."
  super().__init__(self.message)
infrahub_sdk/ctl/task.py ADDED
@@ -0,0 +1,110 @@
+ from __future__ import annotations
+
+ from typing import Optional
+
+ import typer
+ from rich.console import Console
+ from rich.table import Table
+
+ from ..async_typer import AsyncTyper
+ from ..task.manager import TaskFilter
+ from ..task.models import Task, TaskState
+ from .client import initialize_client
+ from .parameters import CONFIG_PARAM
+ from .utils import catch_exception, init_logging
+
+ app = AsyncTyper()
+ console = Console()
+
+
+ @app.callback()
+ def callback() -> None:
+ """Manage Infrahub tasks."""
+
+
+ def _parse_states(states: list[str] | None) -> list[TaskState] | None:
+ if not states:
+ return None
+
+ parsed_states: list[TaskState] = []
+ for state in states:
+ normalized_state = state.strip().upper()
+ try:
+ parsed_states.append(TaskState(normalized_state))
+ except ValueError as exc: # pragma: no cover - typer will surface this as CLI error
+ raise typer.BadParameter(
+ f"Unsupported state '{state}'. Available states: {', '.join(item.value.lower() for item in TaskState)}"
+ ) from exc
+
+ return parsed_states
+
+
+ def _render_table(tasks: list[Task]) -> None:
+ table = Table(title="Infrahub Tasks", box=None)
+ table.add_column("ID", style="cyan", overflow="fold")
+ table.add_column("Title", style="magenta", overflow="fold")
+ table.add_column("State", style="green")
+ table.add_column("Progress", justify="right")
+ table.add_column("Workflow", overflow="fold")
+ table.add_column("Branch", overflow="fold")
+ table.add_column("Updated")
+
+ if not tasks:
+ table.add_row("-", "No tasks found", "-", "-", "-", "-", "-")
+ console.print(table)
+ return
+
+ for task in tasks:
+ progress = f"{task.progress:.0%}" if task.progress is not None else "-"
+ table.add_row(
+ task.id,
+ task.title,
+ task.state.value,
+ progress,
+ task.workflow or "-",
+ task.branch or "-",
+ task.updated_at.isoformat(),
+ )
+
+ console.print(table)
+
+
+ @app.command(name="list")
+ @catch_exception(console=console)
+ async def list_tasks(
+ state: list[str] = typer.Option(
+ None, "--state", "-s", help="Filter by task state. Can be provided multiple times."
+ ),
+ limit: Optional[int] = typer.Option(None, help="Maximum number of tasks to retrieve."),
+ offset: Optional[int] = typer.Option(None, help="Offset for pagination."),
+ include_related_nodes: bool = typer.Option(False, help="Include related nodes in the output."),
+ include_logs: bool = typer.Option(False, help="Include task logs in the output."),
+ json_output: bool = typer.Option(False, "--json", help="Output the result as JSON."),
+ debug: bool = False,
+ _: str = CONFIG_PARAM,
+ ) -> None:
+ """List Infrahub tasks."""
+
+ init_logging(debug=debug)
+
+ client = initialize_client()
+ filters = TaskFilter()
+ parsed_states = _parse_states(state)
+ if parsed_states:
+ filters.state = parsed_states
+
+ tasks = await client.task.filter(
+ filter=filters,
+ limit=limit,
+ offset=offset,
+ include_related_nodes=include_related_nodes,
+ include_logs=include_logs,
+ )
+
+ if json_output:
+ console.print_json(
+ data=[task.model_dump(mode="json") for task in tasks], indent=2, sort_keys=True, highlight=False
+ )
+ return
+
+ _render_table(tasks)
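The new task command is a thin wrapper around the SDK task manager, so the same listing can be scripted directly. A sketch using only the calls visible in the file above (the TaskState member name is an assumption; check TaskState for the valid values):

    from infrahub_sdk import InfrahubClient
    from infrahub_sdk.task.manager import TaskFilter
    from infrahub_sdk.task.models import TaskState

    async def list_completed(client: InfrahubClient) -> None:
        filters = TaskFilter()
        filters.state = [TaskState.COMPLETED]  # assumed member name
        tasks = await client.task.filter(filter=filters, limit=10, include_logs=False)
        for task in tasks:
            print(task.id, task.title, task.state.value)

From a shell, the rough equivalent is infrahubctl task list --state completed --json.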
infrahub_sdk/exceptions.py CHANGED
@@ -5,13 +5,13 @@ from typing import Any


  class Error(Exception):
- def __init__(self, message: str | None = None):
+ def __init__(self, message: str | None = None) -> None:
  self.message = message
  super().__init__(self.message)


  class JsonDecodeError(Error):
- def __init__(self, message: str | None = None, content: str | None = None, url: str | None = None):
+ def __init__(self, message: str | None = None, content: str | None = None, url: str | None = None) -> None:
  self.message = message
  self.content = content
  self.url = url
@@ -23,14 +23,14 @@ class JsonDecodeError(Error):


  class ServerNotReachableError(Error):
- def __init__(self, address: str, message: str | None = None):
+ def __init__(self, address: str, message: str | None = None) -> None:
  self.address = address
  self.message = message or f"Unable to connect to '{address}'."
  super().__init__(self.message)


  class ServerNotResponsiveError(Error):
- def __init__(self, url: str, timeout: int | None = None, message: str | None = None):
+ def __init__(self, url: str, timeout: int | None = None, message: str | None = None) -> None:
  self.url = url
  self.timeout = timeout
  self.message = message or f"Unable to read from '{url}'."
@@ -40,7 +40,7 @@ class ServerNotResponsiveError(Error):


  class GraphQLError(Error):
- def __init__(self, errors: list[dict[str, Any]], query: str | None = None, variables: dict | None = None):
+ def __init__(self, errors: list[dict[str, Any]], query: str | None = None, variables: dict | None = None) -> None:
  self.query = query
  self.variables = variables
  self.errors = errors
@@ -49,21 +49,21 @@ class GraphQLError(Error):


  class BranchNotFoundError(Error):
- def __init__(self, identifier: str, message: str | None = None):
+ def __init__(self, identifier: str, message: str | None = None) -> None:
  self.identifier = identifier
  self.message = message or f"Unable to find the branch '{identifier}' in the Database."
  super().__init__(self.message)


  class SchemaNotFoundError(Error):
- def __init__(self, identifier: str, message: str | None = None):
+ def __init__(self, identifier: str, message: str | None = None) -> None:
  self.identifier = identifier
  self.message = message or f"Unable to find the schema '{identifier}'."
  super().__init__(self.message)


  class ModuleImportError(Error):
- def __init__(self, message: str | None = None):
+ def __init__(self, message: str | None = None) -> None:
  self.message = message or "Unable to import the module"
  super().__init__(self.message)

@@ -75,7 +75,7 @@ class NodeNotFoundError(Error):
  message: str = "Unable to find the node in the database.",
  branch_name: str | None = None,
  node_type: str | None = None,
- ):
+ ) -> None:
  self.node_type = node_type or "unknown"
  self.identifier = identifier
  self.branch_name = branch_name
@@ -97,25 +97,25 @@ class NodeInvalidError(NodeNotFoundError):
  class ResourceNotDefinedError(Error):
  """Raised when trying to access a resource that hasn't been defined."""

- def __init__(self, message: str | None = None):
+ def __init__(self, message: str | None = None) -> None:
  self.message = message or "The requested resource was not found"
  super().__init__(self.message)


  class InfrahubCheckNotFoundError(Error):
- def __init__(self, name: str, message: str | None = None):
+ def __init__(self, name: str, message: str | None = None) -> None:
  self.message = message or f"The requested InfrahubCheck '{name}' was not found."
  super().__init__(self.message)


  class InfrahubTransformNotFoundError(Error):
- def __init__(self, name: str, message: str | None = None):
+ def __init__(self, name: str, message: str | None = None) -> None:
  self.message = message or f"The requested InfrahubTransform '{name}' was not found."
  super().__init__(self.message)


  class ValidationError(Error):
- def __init__(self, identifier: str, message: str | None = None, messages: list[str] | None = None):
+ def __init__(self, identifier: str, message: str | None = None, messages: list[str] | None = None) -> None:
  self.identifier = identifier
  self.message = message
  self.messages = messages
@@ -130,7 +130,7 @@ class ValidationError(Error):


  class ObjectValidationError(Error):
- def __init__(self, position: list[int | str], message: str):
+ def __init__(self, position: list[int | str], message: str) -> None:
  self.position = position
  self.message = message
  super().__init__(self.message)
@@ -140,13 +140,13 @@ class ObjectValidationError(Error):


  class AuthenticationError(Error):
- def __init__(self, message: str | None = None):
+ def __init__(self, message: str | None = None) -> None:
  self.message = message or "Authentication Error, unable to execute the query."
  super().__init__(self.message)


  class URLNotFoundError(Error):
- def __init__(self, url: str):
+ def __init__(self, url: str) -> None:
  self.message = f"`{url}` not found."
  super().__init__(self.message)

@@ -164,12 +164,12 @@ class InvalidResponseError(Error):


  class FileNotValidError(Error):
- def __init__(self, name: str, message: str = ""):
+ def __init__(self, name: str, message: str = "") -> None:
  self.message = message or f"Cannot parse '{name}' content."
  super().__init__(self.message)


  class TimestampFormatError(Error):
- def __init__(self, message: str | None = None):
+ def __init__(self, message: str | None = None) -> None:
  self.message = message or "Invalid timestamp format"
  super().__init__(self.message)
infrahub_sdk/graphql/query.py CHANGED
@@ -9,7 +9,7 @@ class BaseGraphQLQuery:
  query_type: str = "not-defined"
  indentation: int = 4

- def __init__(self, query: dict, variables: dict | None = None, name: str | None = None):
+ def __init__(self, query: dict, variables: dict | None = None, name: str | None = None) -> None:
  self.query = query
  self.variables = variables
  self.name = name or ""
@@ -46,7 +46,7 @@ class Query(BaseGraphQLQuery):
  class Mutation(BaseGraphQLQuery):
  query_type = "mutation"

- def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any):
+ def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any) -> None:
  self.input_data = input_data
  self.mutation = mutation
  super().__init__(*args, **kwargs)
infrahub_sdk/node/attribute.py CHANGED
@@ -15,7 +15,7 @@ if TYPE_CHECKING:
  class Attribute:
  """Represents an attribute of a Node, including its schema, value, and properties."""

- def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict):
+ def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> None:
  """
  Args:
  name (str): The name of the attribute.
infrahub_sdk/node/property.py CHANGED
@@ -4,7 +4,7 @@ from __future__ import annotations
  class NodeProperty:
  """Represents a property of a node, typically used for metadata like display labels."""

- def __init__(self, data: dict | str):
+ def __init__(self, data: dict | str) -> None:
  """
  Args:
  data (Union[dict, str]): Data representing the node property.
infrahub_sdk/node/related_node.py CHANGED
@@ -17,7 +17,7 @@ if TYPE_CHECKING:
  class RelatedNodeBase:
  """Base class for representing a related node in a relationship."""

- def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None):
+ def __init__(self, branch: str, schema: RelationshipSchemaAPI, data: Any | dict, name: str | None = None) -> None:
  """
  Args:
  branch (str): The branch where the related node resides.
@@ -189,7 +189,7 @@ class RelatedNode(RelatedNodeBase):
  schema: RelationshipSchemaAPI,
  data: Any | dict,
  name: str | None = None,
- ):
+ ) -> None:
  """
  Args:
  client (InfrahubClient): The client used to interact with the backend asynchronously.
@@ -236,7 +236,7 @@ class RelatedNodeSync(RelatedNodeBase):
  schema: RelationshipSchemaAPI,
  data: Any | dict,
  name: str | None = None,
- ):
+ ) -> None:
  """
  Args:
  client (InfrahubClientSync): The client used to interact with the backend synchronously.
infrahub_sdk/node/relationship.py CHANGED
@@ -4,7 +4,6 @@ from collections import defaultdict
  from collections.abc import Iterable
  from typing import TYPE_CHECKING, Any

- from ..batch import InfrahubBatch
  from ..exceptions import (
  Error,
  UninitializedError,
@@ -22,7 +21,7 @@ if TYPE_CHECKING:
  class RelationshipManagerBase:
  """Base class for RelationshipManager and RelationshipManagerSync"""

- def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI):
+ def __init__(self, name: str, branch: str, schema: RelationshipSchemaAPI) -> None:
  """
  Args:
  name (str): The name of the relationship.
@@ -108,7 +107,7 @@ class RelationshipManager(RelationshipManagerBase):
  branch: str,
  schema: RelationshipSchemaAPI,
  data: Any | dict,
- ):
+ ) -> None:
  """
  Args:
  name (str): The name of the relationship.
@@ -166,7 +165,7 @@ class RelationshipManager(RelationshipManagerBase):
  raise Error("Unable to fetch the peer, id and/or typename are not defined")
  ids_per_kind_map[peer.typename].append(peer.id)

- batch = InfrahubBatch(max_concurrent_execution=self.client.max_concurrent_execution)
+ batch = await self.client.create_batch()
  for kind, ids in ids_per_kind_map.items():
  batch.add(
  task=self.client.filters,
@@ -231,7 +230,7 @@ class RelationshipManagerSync(RelationshipManagerBase):
  branch: str,
  schema: RelationshipSchemaAPI,
  data: Any | dict,
- ):
+ ) -> None:
  """
  Args:
  name (str): The name of the relationship.
@@ -289,7 +288,6 @@ class RelationshipManagerSync(RelationshipManagerBase):
  raise Error("Unable to fetch the peer, id and/or typename are not defined")
  ids_per_kind_map[peer.typename].append(peer.id)

- # Unlike Async, no need to create a new batch from scratch because we are not using a semaphore
  batch = self.client.create_batch()
  for kind, ids in ids_per_kind_map.items():
  batch.add(