infrahub-server 1.5.0b1__py3-none-any.whl → 1.5.1__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those versions.
Files changed (171)
  1. infrahub/api/dependencies.py +4 -13
  2. infrahub/api/internal.py +2 -0
  3. infrahub/api/oauth2.py +13 -19
  4. infrahub/api/oidc.py +15 -21
  5. infrahub/api/schema.py +24 -3
  6. infrahub/api/transformation.py +22 -20
  7. infrahub/artifacts/models.py +2 -1
  8. infrahub/auth.py +137 -3
  9. infrahub/cli/__init__.py +2 -0
  10. infrahub/cli/db.py +158 -155
  11. infrahub/cli/dev.py +118 -0
  12. infrahub/cli/tasks.py +46 -0
  13. infrahub/cli/upgrade.py +56 -9
  14. infrahub/computed_attribute/tasks.py +20 -8
  15. infrahub/core/attribute.py +10 -2
  16. infrahub/core/branch/enums.py +1 -1
  17. infrahub/core/branch/models.py +7 -3
  18. infrahub/core/branch/tasks.py +68 -7
  19. infrahub/core/constants/__init__.py +3 -0
  20. infrahub/core/diff/calculator.py +2 -2
  21. infrahub/core/diff/query/artifact.py +1 -0
  22. infrahub/core/diff/query/delete_query.py +9 -5
  23. infrahub/core/diff/query/field_summary.py +1 -0
  24. infrahub/core/diff/query/merge.py +39 -23
  25. infrahub/core/graph/__init__.py +1 -1
  26. infrahub/core/initialization.py +5 -2
  27. infrahub/core/migrations/__init__.py +3 -0
  28. infrahub/core/migrations/exceptions.py +4 -0
  29. infrahub/core/migrations/graph/__init__.py +12 -13
  30. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  31. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  32. infrahub/core/migrations/graph/m037_index_attr_vals.py +11 -30
  33. infrahub/core/migrations/graph/m039_ipam_reconcile.py +9 -7
  34. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  35. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +149 -0
  36. infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +147 -0
  37. infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +164 -0
  38. infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +864 -0
  39. infrahub/core/migrations/query/__init__.py +7 -8
  40. infrahub/core/migrations/query/attribute_add.py +8 -6
  41. infrahub/core/migrations/query/attribute_remove.py +134 -0
  42. infrahub/core/migrations/runner.py +54 -0
  43. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  44. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  45. infrahub/core/migrations/schema/node_attribute_add.py +30 -2
  46. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  47. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  48. infrahub/core/migrations/schema/node_remove.py +2 -1
  49. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  50. infrahub/core/migrations/shared.py +62 -14
  51. infrahub/core/models.py +2 -2
  52. infrahub/core/node/__init__.py +42 -12
  53. infrahub/core/node/create.py +46 -63
  54. infrahub/core/node/lock_utils.py +70 -44
  55. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  56. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  57. infrahub/core/node/resource_manager/number_pool.py +2 -1
  58. infrahub/core/query/attribute.py +55 -0
  59. infrahub/core/query/diff.py +61 -16
  60. infrahub/core/query/ipam.py +16 -4
  61. infrahub/core/query/node.py +51 -43
  62. infrahub/core/query/relationship.py +1 -0
  63. infrahub/core/relationship/model.py +10 -5
  64. infrahub/core/schema/__init__.py +56 -0
  65. infrahub/core/schema/attribute_schema.py +4 -0
  66. infrahub/core/schema/definitions/core/check.py +1 -1
  67. infrahub/core/schema/definitions/core/transform.py +1 -1
  68. infrahub/core/schema/definitions/internal.py +2 -2
  69. infrahub/core/schema/generated/attribute_schema.py +2 -2
  70. infrahub/core/schema/manager.py +22 -1
  71. infrahub/core/schema/schema_branch.py +180 -22
  72. infrahub/core/schema/schema_branch_display.py +12 -0
  73. infrahub/core/schema/schema_branch_hfid.py +6 -0
  74. infrahub/core/validators/uniqueness/checker.py +2 -1
  75. infrahub/database/__init__.py +0 -13
  76. infrahub/database/graph.py +21 -0
  77. infrahub/display_labels/tasks.py +13 -7
  78. infrahub/events/branch_action.py +27 -1
  79. infrahub/generators/tasks.py +3 -7
  80. infrahub/git/base.py +4 -1
  81. infrahub/git/integrator.py +1 -1
  82. infrahub/git/models.py +2 -1
  83. infrahub/git/repository.py +22 -5
  84. infrahub/git/tasks.py +66 -10
  85. infrahub/git/utils.py +123 -1
  86. infrahub/graphql/analyzer.py +9 -0
  87. infrahub/graphql/api/endpoints.py +14 -4
  88. infrahub/graphql/manager.py +4 -9
  89. infrahub/graphql/mutations/branch.py +5 -0
  90. infrahub/graphql/mutations/convert_object_type.py +11 -1
  91. infrahub/graphql/mutations/display_label.py +17 -10
  92. infrahub/graphql/mutations/hfid.py +17 -10
  93. infrahub/graphql/mutations/ipam.py +54 -35
  94. infrahub/graphql/mutations/main.py +27 -28
  95. infrahub/graphql/mutations/proposed_change.py +6 -0
  96. infrahub/graphql/schema_sort.py +170 -0
  97. infrahub/graphql/types/branch.py +4 -1
  98. infrahub/graphql/types/enums.py +3 -0
  99. infrahub/hfid/tasks.py +13 -7
  100. infrahub/lock.py +52 -12
  101. infrahub/message_bus/types.py +3 -1
  102. infrahub/permissions/constants.py +2 -0
  103. infrahub/profiles/queries/get_profile_data.py +4 -5
  104. infrahub/proposed_change/tasks.py +66 -23
  105. infrahub/server.py +6 -2
  106. infrahub/services/__init__.py +2 -2
  107. infrahub/services/adapters/http/__init__.py +5 -0
  108. infrahub/services/adapters/workflow/worker.py +14 -3
  109. infrahub/task_manager/event.py +5 -0
  110. infrahub/task_manager/models.py +7 -0
  111. infrahub/task_manager/task.py +73 -0
  112. infrahub/trigger/setup.py +13 -4
  113. infrahub/trigger/tasks.py +3 -0
  114. infrahub/workers/dependencies.py +10 -1
  115. infrahub/workers/infrahub_async.py +10 -2
  116. infrahub/workflows/catalogue.py +8 -0
  117. infrahub/workflows/initialization.py +5 -0
  118. infrahub/workflows/utils.py +2 -1
  119. infrahub_sdk/analyzer.py +1 -1
  120. infrahub_sdk/batch.py +2 -2
  121. infrahub_sdk/branch.py +14 -2
  122. infrahub_sdk/checks.py +1 -1
  123. infrahub_sdk/client.py +15 -14
  124. infrahub_sdk/config.py +29 -2
  125. infrahub_sdk/ctl/branch.py +3 -0
  126. infrahub_sdk/ctl/cli_commands.py +2 -0
  127. infrahub_sdk/ctl/exceptions.py +1 -1
  128. infrahub_sdk/ctl/schema.py +22 -7
  129. infrahub_sdk/ctl/task.py +110 -0
  130. infrahub_sdk/exceptions.py +18 -18
  131. infrahub_sdk/graphql/query.py +2 -2
  132. infrahub_sdk/node/attribute.py +1 -1
  133. infrahub_sdk/node/property.py +1 -1
  134. infrahub_sdk/node/related_node.py +3 -3
  135. infrahub_sdk/node/relationship.py +4 -6
  136. infrahub_sdk/object_store.py +2 -2
  137. infrahub_sdk/operation.py +1 -1
  138. infrahub_sdk/protocols_generator/generator.py +1 -1
  139. infrahub_sdk/pytest_plugin/exceptions.py +9 -9
  140. infrahub_sdk/pytest_plugin/items/base.py +1 -1
  141. infrahub_sdk/pytest_plugin/items/check.py +1 -1
  142. infrahub_sdk/pytest_plugin/items/python_transform.py +1 -1
  143. infrahub_sdk/repository.py +1 -1
  144. infrahub_sdk/schema/__init__.py +33 -5
  145. infrahub_sdk/spec/models.py +7 -0
  146. infrahub_sdk/spec/object.py +41 -102
  147. infrahub_sdk/spec/processors/__init__.py +0 -0
  148. infrahub_sdk/spec/processors/data_processor.py +10 -0
  149. infrahub_sdk/spec/processors/factory.py +34 -0
  150. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  151. infrahub_sdk/task/exceptions.py +4 -4
  152. infrahub_sdk/task/manager.py +2 -2
  153. infrahub_sdk/task/models.py +6 -4
  154. infrahub_sdk/timestamp.py +1 -1
  155. infrahub_sdk/transfer/exporter/json.py +1 -1
  156. infrahub_sdk/transfer/importer/json.py +1 -1
  157. infrahub_sdk/transforms.py +1 -1
  158. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.1.dist-info}/METADATA +4 -2
  159. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.1.dist-info}/RECORD +168 -152
  160. infrahub_testcontainers/container.py +144 -6
  161. infrahub_testcontainers/docker-compose-cluster.test.yml +5 -0
  162. infrahub_testcontainers/docker-compose.test.yml +5 -0
  163. infrahub_testcontainers/helpers.py +19 -4
  164. infrahub_testcontainers/models.py +8 -6
  165. infrahub_testcontainers/performance_test.py +6 -4
  166. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  167. infrahub/core/migrations/graph/m041_create_hfid_display_label_in_db.py +0 -97
  168. infrahub/core/migrations/graph/m042_backfill_hfid_display_label_in_db.py +0 -86
  169. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.1.dist-info}/LICENSE.txt +0 -0
  170. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.1.dist-info}/WHEEL +0 -0
  171. {infrahub_server-1.5.0b1.dist-info → infrahub_server-1.5.1.dist-info}/entry_points.txt +0 -0
infrahub_sdk/batch.py CHANGED
@@ -58,7 +58,7 @@ class InfrahubBatch:
         semaphore: asyncio.Semaphore | None = None,
         max_concurrent_execution: int = 5,
         return_exceptions: bool = False,
-    ):
+    ) -> None:
         self._tasks: list[BatchTask] = []
         self.semaphore = semaphore or asyncio.Semaphore(value=max_concurrent_execution)
         self.return_exceptions = return_exceptions
@@ -90,7 +90,7 @@ class InfrahubBatch:


 class InfrahubBatchSync:
-    def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False):
+    def __init__(self, max_concurrent_execution: int = 5, return_exceptions: bool = False) -> None:
         self._tasks: list[BatchTaskSync] = []
         self.max_concurrent_execution = max_concurrent_execution
         self.return_exceptions = return_exceptions
infrahub_sdk/branch.py CHANGED
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import warnings
+from enum import Enum
 from typing import TYPE_CHECKING, Any, Literal, overload
 from urllib.parse import urlencode

@@ -14,6 +15,13 @@ if TYPE_CHECKING:
     from .client import InfrahubClient, InfrahubClientSync


+class BranchStatus(str, Enum):
+    OPEN = "OPEN"
+    NEED_REBASE = "NEED_REBASE"
+    NEED_UPGRADE_REBASE = "NEED_UPGRADE_REBASE"
+    DELETING = "DELETING"
+
+
 class BranchData(BaseModel):
     id: str
     name: str
@@ -21,6 +29,8 @@ class BranchData(BaseModel):
     sync_with_git: bool
     is_default: bool
     has_schema_changes: bool
+    graph_version: int | None = None
+    status: BranchStatus = BranchStatus.OPEN
     origin_branch: str | None = None
     branched_from: str

@@ -34,6 +44,8 @@ BRANCH_DATA = {
     "is_default": None,
     "sync_with_git": None,
     "has_schema_changes": None,
+    "graph_version": None,
+    "status": None,
 }

 BRANCH_DATA_FILTER = {"@filters": {"name": "$branch_name"}}
@@ -71,7 +83,7 @@ class InfraHubBranchManagerBase:


 class InfrahubBranchManager(InfraHubBranchManagerBase):
-    def __init__(self, client: InfrahubClient):
+    def __init__(self, client: InfrahubClient) -> None:
         self.client = client

     @overload
@@ -221,7 +233,7 @@ class InfrahubBranchManager(InfraHubBranchManagerBase):


 class InfrahubBranchManagerSync(InfraHubBranchManagerBase):
-    def __init__(self, client: InfrahubClientSync):
+    def __init__(self, client: InfrahubClientSync) -> None:
         self.client = client

     def all(self) -> dict[str, BranchData]:
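Usage sketch (illustrative, not part of the package diff): BranchData now carries status and graph_version, so callers can react to branches that need a rebase. This assumes the async branch manager exposes all() like the sync manager shown above; the address is a placeholder.

from infrahub_sdk import InfrahubClient
from infrahub_sdk.branch import BranchStatus


async def report_branches_needing_rebase() -> None:
    client = InfrahubClient(address="http://localhost:8000")  # placeholder address
    branches = await client.branch.all()  # dict[str, BranchData]
    for name, branch in branches.items():
        # NEED_REBASE / NEED_UPGRADE_REBASE are the non-OPEN states introduced above
        if branch.status in (BranchStatus.NEED_REBASE, BranchStatus.NEED_UPGRADE_REBASE):
            print(f"{name}: status={branch.status.value}, graph_version={branch.graph_version}")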
infrahub_sdk/checks.py CHANGED
@@ -43,7 +43,7 @@ class InfrahubCheck:
         initializer: InfrahubCheckInitializer | None = None,
         params: dict | None = None,
         client: InfrahubClient | None = None,
-    ):
+    ) -> None:
         self.git: GitRepoManager | None = None
         self.initializer = initializer or InfrahubCheckInitializer()

infrahub_sdk/client.py CHANGED
@@ -94,7 +94,9 @@ class ProcessRelationsNodeSync(TypedDict):
     related_nodes: list[InfrahubNodeSync]


-def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]):  # type: ignore[no-untyped-def]
+def handle_relogin(
+    func: Callable[..., Coroutine[Any, Any, httpx.Response]],
+) -> Callable[..., Coroutine[Any, Any, httpx.Response]]:
     @wraps(func)
     async def wrapper(client: InfrahubClient, *args: Any, **kwargs: Any) -> httpx.Response:
         response = await func(client, *args, **kwargs)
@@ -108,7 +110,7 @@ def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]):  # type: ignore[no-untyped-def]
     return wrapper


-def handle_relogin_sync(func: Callable[..., httpx.Response]):  # type: ignore[no-untyped-def]
+def handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]:
     @wraps(func)
     def wrapper(client: InfrahubClientSync, *args: Any, **kwargs: Any) -> httpx.Response:
         response = func(client, *args, **kwargs)
@@ -138,7 +140,7 @@ class BaseClient:
         self,
         address: str = "",
         config: Config | dict[str, Any] | None = None,
-    ):
+    ) -> None:
         self.client = None
         self.headers = {"content-type": "application/json"}
         self.access_token: str = ""
@@ -170,6 +172,7 @@ class BaseClient:
         self.group_context: InfrahubGroupContext | InfrahubGroupContextSync
         self._initialize()
         self._request_context: RequestContext | None = None
+        _ = self.config.tls_context  # Early load of the TLS context to catch errors

     def _initialize(self) -> None:
         """Sets the properties for each version of the client"""
@@ -324,7 +327,6 @@ class InfrahubClient(BaseClient):
         self.object_store = ObjectStore(self)
         self.store = NodeStore(default_branch=self.default_branch)
         self.task = InfrahubTaskManager(self)
-        self.concurrent_execution_limit = asyncio.Semaphore(self.max_concurrent_execution)
         self._request_method: AsyncRequester = self.config.requester or self._default_request_method
         self.group_context = InfrahubGroupContext(self)

@@ -574,7 +576,7 @@ class InfrahubClient(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.

         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -694,7 +696,7 @@ class InfrahubClient(BaseClient):
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
             include (list[str], optional): List of attributes or relationships to include in the query.
@@ -791,7 +793,7 @@ class InfrahubClient(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1073,7 +1075,7 @@ class InfrahubClient(BaseClient):

         async with httpx.AsyncClient(
             **proxy_config,
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = await client.request(
@@ -1580,8 +1582,7 @@ class InfrahubClient(BaseClient):

     async def create_batch(self, return_exceptions: bool = False) -> InfrahubBatch:
         return InfrahubBatch(
-            semaphore=self.concurrent_execution_limit,
-            return_exceptions=return_exceptions,
+            max_concurrent_execution=self.max_concurrent_execution, return_exceptions=return_exceptions
         )

     async def get_list_repositories(
@@ -1945,7 +1946,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1992,7 +1993,7 @@ class InfrahubClientSync(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.

         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -2084,7 +2085,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -2913,7 +2914,7 @@ class InfrahubClientSync(BaseClient):

         with httpx.Client(
             **proxy_config,
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = client.request(
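Behavioural note with a sketch (illustrative, not part of the package diff): create_batch() now gives every batch its own concurrency limit derived from max_concurrent_execution instead of sharing the removed client-wide concurrent_execution_limit semaphore, and both HTTP clients now take their verify setting from the cached config.tls_context. Placeholder address below.

from infrahub_sdk import InfrahubClient


async def run_independent_batches() -> None:
    client = InfrahubClient(address="http://localhost:8000")  # placeholder address
    batch_a = await client.create_batch()
    batch_b = await client.create_batch()
    # Each InfrahubBatch now owns its own asyncio.Semaphore sized by
    # max_concurrent_execution, so work queued on batch_a no longer
    # competes for the same semaphore as work queued on batch_b.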
infrahub_sdk/config.py CHANGED
@@ -1,9 +1,10 @@
 from __future__ import annotations

+import ssl
 from copy import deepcopy
 from typing import Any

-from pydantic import Field, field_validator, model_validator
+from pydantic import Field, PrivateAttr, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self

@@ -78,6 +79,7 @@ class ConfigBase(BaseSettings):
         Can be useful to test with self-signed certificates.""",
     )
     tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
+    _ssl_context: ssl.SSLContext | None = PrivateAttr(default=None)

     @model_validator(mode="before")
     @classmethod
@@ -133,6 +135,28 @@ class ConfigBase(BaseSettings):
     def password_authentication(self) -> bool:
         return bool(self.username)

+    @property
+    def tls_context(self) -> ssl.SSLContext:
+        if self._ssl_context:
+            return self._ssl_context
+
+        if self.tls_insecure:
+            self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+            self._ssl_context.check_hostname = False
+            self._ssl_context.verify_mode = ssl.CERT_NONE
+            return self._ssl_context
+
+        if self.tls_ca_file:
+            self._ssl_context = ssl.create_default_context(cafile=self.tls_ca_file)
+
+        if self._ssl_context is None:
+            self._ssl_context = ssl.create_default_context()
+
+        return self._ssl_context
+
+    def set_ssl_context(self, context: ssl.SSLContext) -> None:
+        self._ssl_context = context
+

 class Config(ConfigBase):
     recorder: RecorderType = Field(default=RecorderType.NONE, description="Select builtin recorder for later replay.")
@@ -174,4 +198,7 @@ class Config(ConfigBase):
             if field not in covered_keys:
                 config[field] = deepcopy(getattr(self, field))

-        return Config(**config)
+        new_config = Config(**config)
+        if self._ssl_context:
+            new_config.set_ssl_context(self._ssl_context)
+        return new_config
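Usage sketch (illustrative, not part of the package diff): tls_context lazily builds an ssl.SSLContext from tls_insecure / tls_ca_file and caches it, while set_ssl_context() lets a caller inject a pre-built context, which the copy logic above now carries over. Field values below are placeholders.

import ssl

from infrahub_sdk.config import Config

config = Config(address="http://localhost:8000", tls_ca_file="/path/to/ca.pem")  # placeholder values
context = config.tls_context  # built from tls_ca_file on first access, then cached
assert isinstance(context, ssl.SSLContext)

custom = ssl.create_default_context()
config.set_ssl_context(custom)  # inject a pre-built context
assert config.tls_context is custom  # later accesses return the injected context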
infrahub_sdk/ctl/branch.py CHANGED
@@ -46,6 +46,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
     table.add_column("Sync with Git")
     table.add_column("Has Schema Changes")
     table.add_column("Is Default")
+    table.add_column("Status")

     # identify the default branch and always print it first
     default_branch = [branch for branch in branches.values() if branch.is_default][0]
@@ -57,6 +58,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
         "[green]True" if default_branch.sync_with_git else "[#FF7F50]False",
         "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
         "[green]True" if default_branch.is_default else "[#FF7F50]False",
+        default_branch.status,
     )

     for branch in branches.values():
@@ -71,6 +73,7 @@ async def list_branch(_: str = CONFIG_PARAM) -> None:
         "[green]True" if branch.sync_with_git else "[#FF7F50]False",
         "[green]True" if default_branch.has_schema_changes else "[#FF7F50]False",
         "[green]True" if branch.is_default else "[#FF7F50]False",
+        branch.status,
     )

     console.print(table)
infrahub_sdk/ctl/cli_commands.py CHANGED
@@ -32,6 +32,7 @@ from ..ctl.render import list_jinja2_transforms, print_template_errors
 from ..ctl.repository import app as repository_app
 from ..ctl.repository import find_repository_config_file, get_repository_config
 from ..ctl.schema import app as schema_app
+from ..ctl.task import app as task_app
 from ..ctl.transform import list_transforms
 from ..ctl.utils import (
     catch_exception,
@@ -64,6 +65,7 @@ app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
 app.add_typer(object_app, name="object")
 app.add_typer(graphql_app, name="graphql")
+app.add_typer(task_app, name="task")

 app.command(name="dump")(dump)
 app.command(name="load")(load)
infrahub_sdk/ctl/exceptions.py CHANGED
@@ -3,6 +3,6 @@ class Error(Exception):


 class QueryNotFoundError(Error):
-    def __init__(self, name: str, message: str = ""):
+    def __init__(self, name: str, message: str = "") -> None:
         self.message = message or f"The requested query '{name}' was not found."
         super().__init__(self.message)
infrahub_sdk/ctl/schema.py CHANGED
@@ -14,6 +14,7 @@ from ..async_typer import AsyncTyper
 from ..ctl.client import initialize_client
 from ..ctl.utils import catch_exception, init_logging
 from ..queries import SCHEMA_HASH_SYNC_STATUS
+from ..schema import SchemaWarning
 from ..yaml import SchemaFile
 from .parameters import CONFIG_PARAM
 from .utils import load_yamlfile_from_disk_and_exit
@@ -152,6 +153,8 @@ async def load(

     console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")

+    _display_schema_warnings(console=console, warnings=response.warnings)
+
     if response.schema_updated and wait:
         waited = 0
         continue_waiting = True
@@ -187,12 +190,24 @@ async def check(

     success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)

-    if not success:
+    if not success or not response:
         display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
+        return
+
+    for schema_file in schemas_data:
+        console.print(f"[green] schema '{schema_file.location}' is Valid!")
+
+    warnings = response.pop("warnings", [])
+    schema_warnings = [SchemaWarning.model_validate(warning) for warning in warnings]
+    _display_schema_warnings(console=console, warnings=schema_warnings)
+    if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
+        print("No diff")
     else:
-        for schema_file in schemas_data:
-            console.print(f"[green] schema '{schema_file.location}' is Valid!")
-        if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
-            print("No diff")
-        else:
-            print(yaml.safe_dump(data=response, indent=4))
+        print(yaml.safe_dump(data=response, indent=4))
+
+
+def _display_schema_warnings(console: Console, warnings: list[SchemaWarning]) -> None:
+    for warning in warnings:
+        console.print(
+            f"[yellow] {warning.type.value}: {warning.message} [{', '.join([kind.display for kind in warning.kinds])}]"
+        )
infrahub_sdk/ctl/task.py CHANGED
@@ -0,0 +1,110 @@
+from __future__ import annotations
+
+from typing import Optional
+
+import typer
+from rich.console import Console
+from rich.table import Table
+
+from ..async_typer import AsyncTyper
+from ..task.manager import TaskFilter
+from ..task.models import Task, TaskState
+from .client import initialize_client
+from .parameters import CONFIG_PARAM
+from .utils import catch_exception, init_logging
+
+app = AsyncTyper()
+console = Console()
+
+
+@app.callback()
+def callback() -> None:
+    """Manage Infrahub tasks."""
+
+
+def _parse_states(states: list[str] | None) -> list[TaskState] | None:
+    if not states:
+        return None
+
+    parsed_states: list[TaskState] = []
+    for state in states:
+        normalized_state = state.strip().upper()
+        try:
+            parsed_states.append(TaskState(normalized_state))
+        except ValueError as exc:  # pragma: no cover - typer will surface this as CLI error
+            raise typer.BadParameter(
+                f"Unsupported state '{state}'. Available states: {', '.join(item.value.lower() for item in TaskState)}"
+            ) from exc
+
+    return parsed_states
+
+
+def _render_table(tasks: list[Task]) -> None:
+    table = Table(title="Infrahub Tasks", box=None)
+    table.add_column("ID", style="cyan", overflow="fold")
+    table.add_column("Title", style="magenta", overflow="fold")
+    table.add_column("State", style="green")
+    table.add_column("Progress", justify="right")
+    table.add_column("Workflow", overflow="fold")
+    table.add_column("Branch", overflow="fold")
+    table.add_column("Updated")
+
+    if not tasks:
+        table.add_row("-", "No tasks found", "-", "-", "-", "-", "-")
+        console.print(table)
+        return
+
+    for task in tasks:
+        progress = f"{task.progress:.0%}" if task.progress is not None else "-"
+        table.add_row(
+            task.id,
+            task.title,
+            task.state.value,
+            progress,
+            task.workflow or "-",
+            task.branch or "-",
+            task.updated_at.isoformat(),
+        )
+
+    console.print(table)
+
+
+@app.command(name="list")
+@catch_exception(console=console)
+async def list_tasks(
+    state: list[str] = typer.Option(
+        None, "--state", "-s", help="Filter by task state. Can be provided multiple times."
+    ),
+    limit: Optional[int] = typer.Option(None, help="Maximum number of tasks to retrieve."),
+    offset: Optional[int] = typer.Option(None, help="Offset for pagination."),
+    include_related_nodes: bool = typer.Option(False, help="Include related nodes in the output."),
+    include_logs: bool = typer.Option(False, help="Include task logs in the output."),
+    json_output: bool = typer.Option(False, "--json", help="Output the result as JSON."),
+    debug: bool = False,
+    _: str = CONFIG_PARAM,
+) -> None:
+    """List Infrahub tasks."""
+
+    init_logging(debug=debug)
+
+    client = initialize_client()
+    filters = TaskFilter()
+    parsed_states = _parse_states(state)
+    if parsed_states:
+        filters.state = parsed_states
+
+    tasks = await client.task.filter(
+        filter=filters,
+        limit=limit,
+        offset=offset,
+        include_related_nodes=include_related_nodes,
+        include_logs=include_logs,
+    )
+
+    if json_output:
+        console.print_json(
+            data=[task.model_dump(mode="json") for task in tasks], indent=2, sort_keys=True, highlight=False
+        )
+        return
+
+    _render_table(tasks)
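Usage sketch (illustrative, not part of the package diff): the new ctl/task.py command is a thin wrapper around the SDK task manager, so the same listing can be done programmatically. This assumes TaskState defines a FAILED member (the enum's members are not shown in this diff) and uses a placeholder address.

from infrahub_sdk import InfrahubClient
from infrahub_sdk.task.manager import TaskFilter
from infrahub_sdk.task.models import TaskState


async def show_failed_tasks() -> None:
    client = InfrahubClient(address="http://localhost:8000")  # placeholder address
    filters = TaskFilter()
    filters.state = [TaskState("FAILED")]  # assumes a FAILED state exists
    tasks = await client.task.filter(
        filter=filters,
        limit=10,
        offset=None,
        include_related_nodes=False,
        include_logs=False,
    )
    for task in tasks:
        print(task.id, task.title, task.state.value)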
infrahub_sdk/exceptions.py CHANGED
@@ -5,13 +5,13 @@ from typing import Any


 class Error(Exception):
-    def __init__(self, message: str | None = None):
+    def __init__(self, message: str | None = None) -> None:
         self.message = message
         super().__init__(self.message)


 class JsonDecodeError(Error):
-    def __init__(self, message: str | None = None, content: str | None = None, url: str | None = None):
+    def __init__(self, message: str | None = None, content: str | None = None, url: str | None = None) -> None:
         self.message = message
         self.content = content
         self.url = url
@@ -23,14 +23,14 @@ class JsonDecodeError(Error):


 class ServerNotReachableError(Error):
-    def __init__(self, address: str, message: str | None = None):
+    def __init__(self, address: str, message: str | None = None) -> None:
         self.address = address
         self.message = message or f"Unable to connect to '{address}'."
         super().__init__(self.message)


 class ServerNotResponsiveError(Error):
-    def __init__(self, url: str, timeout: int | None = None, message: str | None = None):
+    def __init__(self, url: str, timeout: int | None = None, message: str | None = None) -> None:
         self.url = url
         self.timeout = timeout
         self.message = message or f"Unable to read from '{url}'."
@@ -40,7 +40,7 @@ class ServerNotResponsiveError(Error):


 class GraphQLError(Error):
-    def __init__(self, errors: list[dict[str, Any]], query: str | None = None, variables: dict | None = None):
+    def __init__(self, errors: list[dict[str, Any]], query: str | None = None, variables: dict | None = None) -> None:
         self.query = query
         self.variables = variables
         self.errors = errors
@@ -49,21 +49,21 @@ class GraphQLError(Error):


 class BranchNotFoundError(Error):
-    def __init__(self, identifier: str, message: str | None = None):
+    def __init__(self, identifier: str, message: str | None = None) -> None:
         self.identifier = identifier
         self.message = message or f"Unable to find the branch '{identifier}' in the Database."
         super().__init__(self.message)


 class SchemaNotFoundError(Error):
-    def __init__(self, identifier: str, message: str | None = None):
+    def __init__(self, identifier: str, message: str | None = None) -> None:
         self.identifier = identifier
         self.message = message or f"Unable to find the schema '{identifier}'."
         super().__init__(self.message)


 class ModuleImportError(Error):
-    def __init__(self, message: str | None = None):
+    def __init__(self, message: str | None = None) -> None:
         self.message = message or "Unable to import the module"
         super().__init__(self.message)

@@ -75,7 +75,7 @@ class NodeNotFoundError(Error):
         message: str = "Unable to find the node in the database.",
         branch_name: str | None = None,
         node_type: str | None = None,
-    ):
+    ) -> None:
         self.node_type = node_type or "unknown"
         self.identifier = identifier
         self.branch_name = branch_name
@@ -97,25 +97,25 @@ class NodeInvalidError(NodeNotFoundError):
 class ResourceNotDefinedError(Error):
     """Raised when trying to access a resource that hasn't been defined."""

-    def __init__(self, message: str | None = None):
+    def __init__(self, message: str | None = None) -> None:
         self.message = message or "The requested resource was not found"
         super().__init__(self.message)


 class InfrahubCheckNotFoundError(Error):
-    def __init__(self, name: str, message: str | None = None):
+    def __init__(self, name: str, message: str | None = None) -> None:
         self.message = message or f"The requested InfrahubCheck '{name}' was not found."
         super().__init__(self.message)


 class InfrahubTransformNotFoundError(Error):
-    def __init__(self, name: str, message: str | None = None):
+    def __init__(self, name: str, message: str | None = None) -> None:
         self.message = message or f"The requested InfrahubTransform '{name}' was not found."
         super().__init__(self.message)


 class ValidationError(Error):
-    def __init__(self, identifier: str, message: str | None = None, messages: list[str] | None = None):
+    def __init__(self, identifier: str, message: str | None = None, messages: list[str] | None = None) -> None:
         self.identifier = identifier
         self.message = message
         self.messages = messages
@@ -130,7 +130,7 @@ class ValidationError(Error):


 class ObjectValidationError(Error):
-    def __init__(self, position: list[int | str], message: str):
+    def __init__(self, position: list[int | str], message: str) -> None:
         self.position = position
         self.message = message
         super().__init__(self.message)
@@ -140,13 +140,13 @@ class ObjectValidationError(Error):


 class AuthenticationError(Error):
-    def __init__(self, message: str | None = None):
+    def __init__(self, message: str | None = None) -> None:
         self.message = message or "Authentication Error, unable to execute the query."
         super().__init__(self.message)


 class URLNotFoundError(Error):
-    def __init__(self, url: str):
+    def __init__(self, url: str) -> None:
         self.message = f"`{url}` not found."
         super().__init__(self.message)

@@ -164,12 +164,12 @@ class InvalidResponseError(Error):


 class FileNotValidError(Error):
-    def __init__(self, name: str, message: str = ""):
+    def __init__(self, name: str, message: str = "") -> None:
         self.message = message or f"Cannot parse '{name}' content."
         super().__init__(self.message)


 class TimestampFormatError(Error):
-    def __init__(self, message: str | None = None):
+    def __init__(self, message: str | None = None) -> None:
         self.message = message or "Invalid timestamp format"
         super().__init__(self.message)
infrahub_sdk/graphql/query.py CHANGED
@@ -9,7 +9,7 @@ class BaseGraphQLQuery:
     query_type: str = "not-defined"
     indentation: int = 4

-    def __init__(self, query: dict, variables: dict | None = None, name: str | None = None):
+    def __init__(self, query: dict, variables: dict | None = None, name: str | None = None) -> None:
         self.query = query
         self.variables = variables
         self.name = name or ""
@@ -46,7 +46,7 @@ class Query(BaseGraphQLQuery):
 class Mutation(BaseGraphQLQuery):
     query_type = "mutation"

-    def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any):
+    def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any) -> None:
         self.input_data = input_data
         self.mutation = mutation
         super().__init__(*args, **kwargs)
infrahub_sdk/node/attribute.py CHANGED
@@ -15,7 +15,7 @@ if TYPE_CHECKING:
 class Attribute:
     """Represents an attribute of a Node, including its schema, value, and properties."""

-    def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict):
+    def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict) -> None:
         """
         Args:
             name (str): The name of the attribute.
infrahub_sdk/node/property.py CHANGED
@@ -4,7 +4,7 @@ from __future__ import annotations
 class NodeProperty:
     """Represents a property of a node, typically used for metadata like display labels."""

-    def __init__(self, data: dict | str):
+    def __init__(self, data: dict | str) -> None:
         """
         Args:
             data (Union[dict, str]): Data representing the node property.