infrahub-server 1.5.5__py3-none-any.whl → 1.6.0b0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. infrahub/api/artifact.py +5 -3
  2. infrahub/auth.py +5 -6
  3. infrahub/cli/db.py +3 -3
  4. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +2 -2
  5. infrahub/cli/dev.py +30 -0
  6. infrahub/config.py +62 -14
  7. infrahub/constants/database.py +5 -5
  8. infrahub/core/branch/models.py +24 -6
  9. infrahub/core/diff/model/diff.py +2 -2
  10. infrahub/core/graph/constraints.py +2 -2
  11. infrahub/core/manager.py +155 -29
  12. infrahub/core/merge.py +29 -2
  13. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +2 -3
  14. infrahub/core/migrations/shared.py +2 -2
  15. infrahub/core/node/__init__.py +1 -1
  16. infrahub/core/node/ipam.py +4 -4
  17. infrahub/core/node/node_property_attribute.py +2 -2
  18. infrahub/core/protocols.py +7 -1
  19. infrahub/core/query/branch.py +11 -0
  20. infrahub/core/query/standard_node.py +3 -0
  21. infrahub/core/relationship/model.py +3 -9
  22. infrahub/core/schema/__init__.py +3 -3
  23. infrahub/core/task/user_task.py +2 -2
  24. infrahub/core/validators/enum.py +2 -2
  25. infrahub/dependencies/interface.py +2 -2
  26. infrahub/events/constants.py +2 -2
  27. infrahub/git/base.py +43 -1
  28. infrahub/git/models.py +2 -1
  29. infrahub/git/repository.py +5 -1
  30. infrahub/git/tasks.py +28 -1
  31. infrahub/git/utils.py +9 -0
  32. infrahub/graphql/analyzer.py +4 -4
  33. infrahub/graphql/mutations/computed_attribute.py +1 -1
  34. infrahub/graphql/mutations/convert_object_type.py +1 -1
  35. infrahub/graphql/mutations/display_label.py +1 -1
  36. infrahub/graphql/mutations/hfid.py +1 -1
  37. infrahub/graphql/mutations/ipam.py +1 -1
  38. infrahub/graphql/mutations/profile.py +1 -0
  39. infrahub/graphql/mutations/relationship.py +2 -2
  40. infrahub/graphql/mutations/resource_manager.py +1 -1
  41. infrahub/graphql/queries/__init__.py +2 -1
  42. infrahub/graphql/queries/branch.py +58 -3
  43. infrahub/graphql/queries/ipam.py +9 -4
  44. infrahub/graphql/queries/resource_manager.py +5 -8
  45. infrahub/graphql/queries/search.py +3 -3
  46. infrahub/graphql/schema.py +2 -0
  47. infrahub/graphql/types/__init__.py +3 -1
  48. infrahub/graphql/types/branch.py +98 -2
  49. infrahub/lock.py +6 -6
  50. infrahub/patch/constants.py +2 -2
  51. infrahub/task_manager/task.py +2 -2
  52. infrahub/telemetry/constants.py +2 -2
  53. infrahub/trigger/models.py +2 -2
  54. infrahub/utils.py +1 -1
  55. infrahub/validators/tasks.py +1 -1
  56. infrahub/workers/infrahub_async.py +37 -0
  57. infrahub_sdk/async_typer.py +2 -1
  58. infrahub_sdk/batch.py +2 -2
  59. infrahub_sdk/client.py +8 -9
  60. infrahub_sdk/config.py +2 -2
  61. infrahub_sdk/ctl/branch.py +1 -1
  62. infrahub_sdk/ctl/cli.py +2 -2
  63. infrahub_sdk/ctl/cli_commands.py +2 -1
  64. infrahub_sdk/ctl/graphql.py +2 -2
  65. infrahub_sdk/ctl/importer.py +1 -1
  66. infrahub_sdk/ctl/utils.py +3 -3
  67. infrahub_sdk/node/attribute.py +11 -10
  68. infrahub_sdk/node/constants.py +1 -2
  69. infrahub_sdk/node/node.py +54 -11
  70. infrahub_sdk/node/related_node.py +1 -1
  71. infrahub_sdk/object_store.py +4 -4
  72. infrahub_sdk/operation.py +2 -2
  73. infrahub_sdk/protocols_generator/generator.py +1 -1
  74. infrahub_sdk/pytest_plugin/items/jinja2_transform.py +1 -1
  75. infrahub_sdk/pytest_plugin/models.py +1 -1
  76. infrahub_sdk/pytest_plugin/plugin.py +1 -1
  77. infrahub_sdk/query_groups.py +2 -2
  78. infrahub_sdk/schema/__init__.py +10 -11
  79. infrahub_sdk/schema/main.py +2 -2
  80. infrahub_sdk/schema/repository.py +2 -2
  81. infrahub_sdk/spec/object.py +2 -2
  82. infrahub_sdk/spec/range_expansion.py +1 -1
  83. infrahub_sdk/template/__init__.py +2 -1
  84. infrahub_sdk/transfer/importer/json.py +3 -3
  85. infrahub_sdk/types.py +2 -2
  86. infrahub_sdk/utils.py +2 -2
  87. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0b0.dist-info}/METADATA +58 -59
  88. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0b0.dist-info}/RECORD +217 -223
  89. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0b0.dist-info}/WHEEL +1 -1
  90. infrahub_server-1.6.0b0.dist-info/entry_points.txt +12 -0
  91. infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
  92. infrahub_testcontainers/docker-compose.test.yml +1 -1
  93. infrahub/core/schema/generated/__init__.py +0 -0
  94. infrahub/core/schema/generated/attribute_schema.py +0 -133
  95. infrahub/core/schema/generated/base_node_schema.py +0 -111
  96. infrahub/core/schema/generated/genericnode_schema.py +0 -30
  97. infrahub/core/schema/generated/node_schema.py +0 -40
  98. infrahub/core/schema/generated/relationship_schema.py +0 -141
  99. infrahub_server-1.5.5.dist-info/entry_points.txt +0 -13
  100. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0b0.dist-info/licenses}/LICENSE.txt +0 -0
infrahub_sdk/node/node.py CHANGED
@@ -284,12 +284,12 @@ class InfrahubNodeBase:
  def _strip_unmodified_dict(data: dict, original_data: dict, variables: dict, item: str) -> None:
  data_item = data.get(item)
  if item in original_data and isinstance(original_data[item], dict) and isinstance(data_item, dict):
- for item_key in original_data[item].keys():
+ for item_key in original_data[item]:
  for property_name in PROPERTIES_OBJECT:
  if item_key == property_name and isinstance(original_data[item][property_name], dict):
  if original_data[item][property_name].get("id"):
  original_data[item][property_name] = original_data[item][property_name]["id"]
- if item_key in data[item].keys():
+ if item_key in data[item]:
  if item_key == "id" and len(data[item].keys()) > 1:
  # Related nodes typically require an ID. So the ID is only
  # removed if it's the last key in the current context
@@ -335,8 +335,8 @@ class InfrahubNodeBase:
  elif isinstance(relationship_property, RelationshipManagerBase) and not relationship_property.has_update:
  data.pop(relationship)

- for item in original_data.keys():
- if item in data.keys():
+ for item in original_data:
+ if item in data:
  if data[item] == original_data[item]:
  if attr := getattr(self, item, None): # this should never be None, just a safety default value
  if not isinstance(attr, Attribute) or not attr.value_has_been_mutated:
@@ -741,7 +741,7 @@ class InfrahubNode(InfrahubNodeBase):

  if (
  rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr]
- and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr]
+ and rel_schema.kind not in {RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT} # type: ignore[union-attr]
  and not (include and rel_name in include)
  ):
  continue
@@ -895,6 +895,7 @@ class InfrahubNode(InfrahubNodeBase):
  branch: str,
  related_nodes: list[InfrahubNode],
  timeout: int | None = None,
+ recursive: bool = False,
  ) -> None:
  """Processes the Relationships of a InfrahubNode and add Related Nodes to a list.

@@ -903,6 +904,7 @@ class InfrahubNode(InfrahubNodeBase):
  branch (str): The branch name.
  related_nodes (list[InfrahubNode]): The list to which related nodes will be appended.
  timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+ recursive:(bool): Whether to recursively process relationships of related nodes.
  """
  for rel_name in self._relationships:
  rel = getattr(self, rel_name)
@@ -910,17 +912,37 @@ class InfrahubNode(InfrahubNodeBase):
  relation = node_data["node"].get(rel_name, None)
  if relation and relation.get("node", None):
  related_node = await InfrahubNode.from_graphql(
- client=self._client, branch=branch, data=relation, timeout=timeout
+ client=self._client,
+ branch=branch,
+ data=relation,
+ timeout=timeout,
  )
  related_nodes.append(related_node)
+ if recursive:
+ await related_node._process_relationships(
+ node_data=relation,
+ branch=branch,
+ related_nodes=related_nodes,
+ recursive=recursive,
+ )
  elif rel and isinstance(rel, RelationshipManager):
  peers = node_data["node"].get(rel_name, None)
  if peers and peers["edges"]:
  for peer in peers["edges"]:
  related_node = await InfrahubNode.from_graphql(
- client=self._client, branch=branch, data=peer, timeout=timeout
+ client=self._client,
+ branch=branch,
+ data=peer,
+ timeout=timeout,
  )
  related_nodes.append(related_node)
+ if recursive:
+ await related_node._process_relationships(
+ node_data=peer,
+ branch=branch,
+ related_nodes=related_nodes,
+ recursive=recursive,
+ )

  async def get_pool_allocated_resources(self, resource: InfrahubNode) -> list[InfrahubNode]:
  """Fetch all nodes that were allocated for the pool and a given resource.
@@ -1364,7 +1386,7 @@ class InfrahubNodeSync(InfrahubNodeBase):

  if (
  rel_schema.cardinality == RelationshipCardinality.MANY # type: ignore[union-attr]
- and rel_schema.kind not in [RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT] # type: ignore[union-attr]
+ and rel_schema.kind not in {RelationshipKind.ATTRIBUTE, RelationshipKind.PARENT} # type: ignore[union-attr]
  and not (include and rel_name in include)
  ):
  continue
@@ -1520,6 +1542,7 @@ class InfrahubNodeSync(InfrahubNodeBase):
  branch: str,
  related_nodes: list[InfrahubNodeSync],
  timeout: int | None = None,
+ recursive: bool = False,
  ) -> None:
  """Processes the Relationships of a InfrahubNodeSync and add Related Nodes to a list.

@@ -1528,7 +1551,7 @@ class InfrahubNodeSync(InfrahubNodeBase):
  branch (str): The branch name.
  related_nodes (list[InfrahubNodeSync]): The list to which related nodes will be appended.
  timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
-
+ recursive:(bool): Whether to recursively process relationships of related nodes.
  """
  for rel_name in self._relationships:
  rel = getattr(self, rel_name)
@@ -1536,17 +1559,37 @@ class InfrahubNodeSync(InfrahubNodeBase):
  relation = node_data["node"].get(rel_name, None)
  if relation and relation.get("node", None):
  related_node = InfrahubNodeSync.from_graphql(
- client=self._client, branch=branch, data=relation, timeout=timeout
+ client=self._client,
+ branch=branch,
+ data=relation,
+ timeout=timeout,
  )
  related_nodes.append(related_node)
+ if recursive:
+ related_node._process_relationships(
+ node_data=relation,
+ branch=branch,
+ related_nodes=related_nodes,
+ recursive=recursive,
+ )
  elif rel and isinstance(rel, RelationshipManagerSync):
  peers = node_data["node"].get(rel_name, None)
  if peers and peers["edges"]:
  for peer in peers["edges"]:
  related_node = InfrahubNodeSync.from_graphql(
- client=self._client, branch=branch, data=peer, timeout=timeout
+ client=self._client,
+ branch=branch,
+ data=peer,
+ timeout=timeout,
  )
  related_nodes.append(related_node)
+ if recursive:
+ related_node._process_relationships(
+ node_data=peer,
+ branch=branch,
+ related_nodes=related_nodes,
+ recursive=recursive,
+ )

  def get_pool_allocated_resources(self, resource: InfrahubNodeSync) -> list[InfrahubNodeSync]:
  """Fetch all nodes that were allocated for the pool and a given resource.
@@ -64,7 +64,7 @@ class RelatedNodeBase:
  self._display_label = node_data.get("display_label", None)
  self._typename = node_data.get("__typename", None)

- self.updated_at: str | None = data.get("updated_at", data.get("_relation__updated_at", None))
+ self.updated_at: str | None = data.get("updated_at", data.get("_relation__updated_at"))

  # FIXME, we won't need that once we are only supporting paginated results
  if self._typename and self._typename.startswith("Related"):
@@ -33,7 +33,7 @@ class ObjectStore(ObjectStoreBase):
  self.client.log.error(f"Unable to connect to {self.client.address} .. ")
  raise
  except httpx.HTTPStatusError as exc:
- if exc.response.status_code in [401, 403]:
+ if exc.response.status_code in {401, 403}:
  response = exc.response.json()
  errors = response.get("errors")
  messages = [error.get("message") for error in errors]
@@ -54,7 +54,7 @@ class ObjectStore(ObjectStoreBase):
  self.client.log.error(f"Unable to connect to {self.client.address} .. ")
  raise
  except httpx.HTTPStatusError as exc:
- if exc.response.status_code in [401, 403]:
+ if exc.response.status_code in {401, 403}:
  response = exc.response.json()
  errors = response.get("errors")
  messages = [error.get("message") for error in errors]
@@ -81,7 +81,7 @@ class ObjectStoreSync(ObjectStoreBase):
  self.client.log.error(f"Unable to connect to {self.client.address} .. ")
  raise
  except httpx.HTTPStatusError as exc:
- if exc.response.status_code in [401, 403]:
+ if exc.response.status_code in {401, 403}:
  response = exc.response.json()
  errors = response.get("errors")
  messages = [error.get("message") for error in errors]
@@ -102,7 +102,7 @@ class ObjectStoreSync(ObjectStoreBase):
  self.client.log.error(f"Unable to connect to {self.client.address} .. ")
  raise
  except httpx.HTTPStatusError as exc:
- if exc.response.status_code in [401, 403]:
+ if exc.response.status_code in {401, 403}:
  response = exc.response.json()
  errors = response.get("errors")
  messages = [error.get("message") for error in errors]
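The object-store hunks above (and several later ones) replace list literals with set literals in membership tests, e.g. `status_code in {401, 403}`. This is the recurring cleanup across the release: for constant elements CPython folds the set literal into a cached `frozenset`, and the intent (pure membership, no ordering) reads more clearly than a list. A small standalone illustration of the idiom, not taken from the package:

```python
def is_auth_error(status_code: int) -> bool:
    # Membership against a set literal of constants: CPython compiles {401, 403}
    # into a single frozenset constant, so no container is rebuilt per call.
    return status_code in {401, 403}


assert is_auth_error(403) is True
assert is_auth_error(500) is False
```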
infrahub_sdk/operation.py CHANGED
@@ -65,14 +65,14 @@ class InfrahubOperation:
  await self._init_client.schema.all(branch=self.branch_name)

  for kind in data:
- if kind in self._init_client.schema.cache[self.branch_name].nodes.keys():
+ if kind in self._init_client.schema.cache[self.branch_name].nodes:
  for result in data[kind].get("edges", []):
  node = await self.infrahub_node.from_graphql(
  client=self._init_client, branch=self.branch_name, data=result
  )
  self._nodes.append(node)
  await node._process_relationships(
- node_data=result, branch=self.branch_name, related_nodes=self._related_nodes
+ node_data=result, branch=self.branch_name, related_nodes=self._related_nodes, recursive=True
  )

  for node in self._nodes + self._related_nodes:
@@ -56,7 +56,7 @@ class CodeGenerator:
  if not e.startswith("__")
  and not e.endswith("__")
  and e
- not in ("TYPE_CHECKING", "CoreNode", "Optional", "Protocol", "Union", "annotations", "runtime_checkable")
+ not in {"TYPE_CHECKING", "CoreNode", "Optional", "Protocol", "Union", "annotations", "runtime_checkable"}
  ]

  self.sorted_generics = self._sort_and_filter_models(self.generics, filters=["CoreNode"] + self.base_protocols)
@@ -83,7 +83,7 @@ class InfrahubJinja2Item(InfrahubItem):
  class InfrahubJinja2TransformSmokeItem(InfrahubJinja2Item):
  def runtest(self) -> None:
  file_path: Path = self.session.infrahub_config_path.parent / self.resource_config.template_path # type: ignore[attr-defined]
- self.get_jinja2_environment().parse(file_path.read_text(), filename=file_path.name)
+ self.get_jinja2_environment().parse(file_path.read_text(encoding="utf-8"), filename=file_path.name)


  class InfrahubJinja2TransformUnitRenderItem(InfrahubJinja2Item):
@@ -55,7 +55,7 @@ class InfrahubInputOutputTest(InfrahubBaseTest):

  if suffix and suffix == "json":
  return ujson.loads(text)
- if suffix in ("yml", "yaml"):
+ if suffix in {"yml", "yaml"}:
  return yaml.safe_load(text)

  return text
@@ -90,7 +90,7 @@ def pytest_sessionstart(session: Session) -> None:


  def pytest_collect_file(parent: Collector | Item, file_path: Path) -> InfrahubYamlFile | None:
- if file_path.suffix in [".yml", ".yaml"] and file_path.name.startswith("test_"):
+ if file_path.suffix in {".yml", ".yaml"} and file_path.name.startswith("test_"):
  return InfrahubYamlFile.from_parent(parent, path=file_path)
  return None

@@ -168,7 +168,7 @@ class InfrahubGroupContext(InfrahubGroupContextBase):
  return

  # Calculate how many nodes should be deleted
- self.unused_member_ids = set(existing_group.members.peer_ids) - set(members) # type: ignore
+ self.unused_member_ids = list(set(existing_group.members.peer_ids) - set(members)) # type: ignore[union-attr]

  if not self.delete_unused_nodes:
  return
@@ -262,7 +262,7 @@ class InfrahubGroupContextSync(InfrahubGroupContextBase):
  return

  # Calculate how many nodes should be deleted
- self.unused_member_ids = set(existing_group.members.peer_ids) - set(members) # type: ignore
+ self.unused_member_ids = list(set(existing_group.members.peer_ids) - set(members)) # type: ignore[union-attr]

  if not self.delete_unused_nodes:
  return
@@ -7,12 +7,11 @@ import warnings
  from collections.abc import MutableMapping
  from enum import Enum
  from time import sleep
- from typing import TYPE_CHECKING, Any, TypedDict, Union
+ from typing import TYPE_CHECKING, Any, TypeAlias, TypedDict
  from urllib.parse import urlencode

  import httpx
  from pydantic import BaseModel, Field
- from typing_extensions import TypeAlias

  from ..exceptions import (
  BranchNotFoundError,
@@ -46,7 +45,7 @@ if TYPE_CHECKING:
  from ..client import InfrahubClient, InfrahubClientSync, SchemaType, SchemaTypeSync
  from ..node import InfrahubNode, InfrahubNodeSync

- InfrahubNodeTypes = Union[InfrahubNode, InfrahubNodeSync]
+ InfrahubNodeTypes: TypeAlias = InfrahubNode | InfrahubNodeSync


  __all__ = [
@@ -84,11 +83,11 @@ class EnumMutation(str, Enum):
  remove = "SchemaEnumRemove"


- MainSchemaTypes: TypeAlias = Union[NodeSchema, GenericSchema]
- MainSchemaTypesAPI: TypeAlias = Union[NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI, TemplateSchemaAPI]
- MainSchemaTypesAll: TypeAlias = Union[
- NodeSchema, GenericSchema, NodeSchemaAPI, GenericSchemaAPI, ProfileSchemaAPI, TemplateSchemaAPI
- ]
+ MainSchemaTypes: TypeAlias = NodeSchema | GenericSchema
+ MainSchemaTypesAPI: TypeAlias = NodeSchemaAPI | GenericSchemaAPI | ProfileSchemaAPI | TemplateSchemaAPI
+ MainSchemaTypesAll: TypeAlias = (
+ NodeSchema | GenericSchema | NodeSchemaAPI | GenericSchemaAPI | ProfileSchemaAPI | TemplateSchemaAPI
+ )


  class SchemaWarningType(Enum):
@@ -123,7 +122,7 @@ class InfrahubSchemaBase:
  SchemaRoot(**data)

  def validate_data_against_schema(self, schema: MainSchemaTypesAPI, data: dict) -> None:
- for key in data.keys():
+ for key in data:
  if key not in schema.relationship_names + schema.attribute_names:
  identifier = f"{schema.kind}"
  raise ValidationError(
@@ -194,12 +193,12 @@ class InfrahubSchemaBase:
  hash=status["hash"], previous_hash=status["previous_hash"], warnings=status.get("warnings") or []
  )

- if response.status_code in [
+ if response.status_code in {
  httpx.codes.BAD_REQUEST,
  httpx.codes.UNPROCESSABLE_ENTITY,
  httpx.codes.UNAUTHORIZED,
  httpx.codes.FORBIDDEN,
- ]:
+ }:
  return SchemaLoadResponse(errors=response.json())

  response.raise_for_status()
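The schema module now imports `TypeAlias` from `typing` instead of `typing_extensions` and spells unions with the PEP 604 `|` operator, which is safe because the package already requires Python >=3.12. A hedged sketch of the same pattern outside the SDK; the names below are illustrative and not Infrahub APIs:

```python
from typing import TYPE_CHECKING, TypeAlias

if TYPE_CHECKING:
    # Typing-only imports stay behind TYPE_CHECKING, as in the schema module.
    from decimal import Decimal

    # PEP 604 "|" unions replace typing.Union; because the alias lives in the
    # TYPE_CHECKING block it is never evaluated at runtime.
    Number: TypeAlias = int | float | Decimal


def double(value: "Number") -> "Number":
    # String annotations keep the runtime independent of the typing-only alias.
    return value * 2


print(double(21))  # 42
```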
@@ -3,7 +3,7 @@ from __future__ import annotations
  import warnings
  from collections.abc import MutableMapping
  from enum import Enum
- from typing import TYPE_CHECKING, Any, Union
+ from typing import TYPE_CHECKING, Any

  from pydantic import BaseModel, ConfigDict, Field
  from typing_extensions import Self
@@ -11,7 +11,7 @@ from typing_extensions import Self
  if TYPE_CHECKING:
  from ..node import InfrahubNode, InfrahubNodeSync

- InfrahubNodeTypes = Union[InfrahubNode, InfrahubNodeSync]
+ InfrahubNodeTypes = InfrahubNode | InfrahubNodeSync


  class RelationshipCardinality(str, Enum):
@@ -1,7 +1,7 @@
  from __future__ import annotations

  from pathlib import Path
- from typing import TYPE_CHECKING, Any, TypeVar, Union
+ from typing import TYPE_CHECKING, Any, TypeVar

  from pydantic import BaseModel, ConfigDict, Field, field_validator

@@ -18,7 +18,7 @@ from ..utils import duplicates
  if TYPE_CHECKING:
  from ..node import InfrahubNode, InfrahubNodeSync

- InfrahubNodeTypes = Union[InfrahubNode, InfrahubNodeSync]
+ InfrahubNodeTypes = InfrahubNode | InfrahubNodeSync

  ResourceClass = TypeVar("ResourceClass")

@@ -74,7 +74,7 @@ class RelationshipInfo(BaseModel):

  @property
  def is_reference(self) -> bool:
- return self.format in [RelationshipDataFormat.ONE_REF, RelationshipDataFormat.MANY_REF]
+ return self.format in {RelationshipDataFormat.ONE_REF, RelationshipDataFormat.MANY_REF}

  def get_context(self, value: Any) -> dict:
  """Return a dict to insert to the context if the relationship is mandatory"""
@@ -230,7 +230,7 @@ class InfrahubObjectFileData(BaseModel):

  # First validate if all mandatory fields are present
  for element in schema.mandatory_input_names:
- if not any([element in data.keys(), element in context.keys()]):
+ if not any([element in data, element in context]):
  errors.append(ObjectValidationError(position=position + [element], message=f"{element} is mandatory"))

  # Validate if all attributes are valid
@@ -69,7 +69,7 @@ def _extract_constants(pattern: str, re_compiled: re.Pattern) -> tuple[list[int]
  def _expand_interfaces(pattern: str, interface_constant: list[int], cartesian_list: list[list[str]]) -> list[str]:
  def _pairwise(lst: list[int]) -> list[tuple[int, int]]:
  it = iter(lst)
- return list(zip(it, it))
+ return list(zip(it, it, strict=False))

  if interface_constant[-1] < len(pattern):
  interface_constant.append(len(pattern))
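The `zip(..., strict=False)` changes in this release make the pre-existing truncating behaviour explicit rather than accidental (linters such as ruff flag bare `zip` calls under rule B905 for exactly this reason). For the `_pairwise` helper in the range-expansion hunk above, the effect is easy to see in isolation; this is a standalone re-statement, not the package's module:

```python
def pairwise(values: list[int]) -> list[tuple[int, int]]:
    # zip(it, it) pulls from the same iterator twice per step, producing
    # consecutive pairs; strict=False states explicitly that a trailing odd
    # element may be dropped instead of raising ValueError.
    it = iter(values)
    return list(zip(it, it, strict=False))


assert pairwise([1, 4, 7, 9]) == [(1, 4), (7, 9)]
assert pairwise([1, 4, 7]) == [(1, 4)]  # odd trailing element silently dropped
```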
@@ -1,8 +1,9 @@
  from __future__ import annotations

  import linecache
+ from collections.abc import Callable
  from pathlib import Path
- from typing import Any, Callable, NoReturn
+ from typing import Any, NoReturn

  import jinja2
  from jinja2 import meta, nodes
@@ -62,7 +62,7 @@ class LineDelimitedJSONImporter(ImporterInterface):

  with self.wrapped_task_output("Analyzing import"):
  import_nodes_by_kind = defaultdict(list)
- for graphql_data, kind in zip(table.column("graphql_json"), table.column("kind")):
+ for graphql_data, kind in zip(table.column("graphql_json"), table.column("kind"), strict=False):
  node = await InfrahubNode.from_graphql(self.client, branch, ujson.loads(str(graphql_data)))
  import_nodes_by_kind[str(kind)].append(node)
  self.all_nodes[node.id] = node
@@ -109,7 +109,7 @@ class LineDelimitedJSONImporter(ImporterInterface):
  relationship_schema
  )

- for relationship_name in self.optional_relationships_schemas_by_node_kind[node_kind].keys():
+ for relationship_name in self.optional_relationships_schemas_by_node_kind[node_kind]:
  relationship_value = getattr(node, relationship_name)
  if isinstance(relationship_value, RelationshipManager):
  if relationship_value.peer_ids:
@@ -144,7 +144,7 @@ class LineDelimitedJSONImporter(ImporterInterface):
  await self.execute_batches([update_batch], "Adding optional relationships to nodes")

  async def update_many_to_many_relationships(self, file: Path) -> None:
- relationships = ujson.loads(file.read_text())
+ relationships = ujson.loads(file.read_text(encoding="utf-8"))
  update_batch = await self.client.create_batch(return_exceptions=True)

  for relationship in relationships:
infrahub_sdk/types.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

  import enum
  from logging import Logger
- from typing import TYPE_CHECKING, Any, Protocol, Union, runtime_checkable
+ from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable

  from pydantic import BaseModel

@@ -65,7 +65,7 @@ class InfrahubLogger(Protocol):
  """Send an exception event."""


- InfrahubLoggers = Union[InfrahubLogger, Logger]
+ InfrahubLoggers = InfrahubLogger | Logger


  class Order(BaseModel):
infrahub_sdk/utils.py CHANGED
@@ -168,7 +168,7 @@ def str_to_bool(value: str) -> bool:
  if isinstance(value, bool):
  return value

- if isinstance(value, int) and value in [0, 1]:
+ if isinstance(value, int) and value in {0, 1}:
  return bool(value)

  if not isinstance(value, str):
@@ -318,7 +318,7 @@ def write_to_file(path: Path, value: Any) -> bool:
  raise FileExistsError(f"{path} is a directory")

  to_write = str(value)
- written = path.write_text(to_write)
+ written = path.write_text(to_write, encoding="utf-8")

  return written is not None

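utils.py gets the same explicit-encoding treatment as the importer and the Jinja2 smoke test: the `read_text`/`write_text` calls touched in this release now pass `encoding="utf-8"`, so the result no longer depends on the platform's locale default. A small usage sketch of the two helpers shown above, assuming the package is installed; the temporary path is illustrative:

```python
from pathlib import Path

from infrahub_sdk.utils import str_to_bool, write_to_file

assert str_to_bool(True) is True   # bools are returned unchanged
assert str_to_bool(1) is True      # ints 0/1 are coerced via bool(), per the hunk above

# write_to_file stringifies the value and, as of this change, always writes UTF-8.
write_to_file(Path("/tmp/example-output.txt"), "rendered output")
```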
@@ -1,66 +1,66 @@
- Metadata-Version: 2.3
+ Metadata-Version: 2.4
  Name: infrahub-server
- Version: 1.5.5
+ Version: 1.6.0b0
  Summary: Infrahub is taking a new approach to Infrastructure Management by providing a new generation of datastore to organize and control all the data that defines how an infrastructure should run.
- License: Apache-2.0
- Author: OpsMill
- Author-email: info@opsmill.com
- Requires-Python: >=3.12,<3.13
+ Project-URL: Homepage, https://opsmill.com
+ Project-URL: Repository, https://github.com/opsmill/infrahub
+ Project-URL: Documentation, https://docs.infrahub.app/
+ Author-email: OpsMill <info@opsmill.com>
+ License-Expression: Apache-2.0
+ License-File: LICENSE.txt
  Classifier: Intended Audience :: Developers
- Classifier: License :: OSI Approved :: Apache Software License
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.12
- Requires-Dist: Jinja2 (>=3,<4)
- Requires-Dist: aio-pika (>=9.4,<9.5)
- Requires-Dist: aiodataloader (==0.4.0)
- Requires-Dist: ariadne-codegen (==0.15.3)
- Requires-Dist: asgi-correlation-id (==4.2.0)
- Requires-Dist: authlib (==1.6.5)
- Requires-Dist: bcrypt (>=4.1,<4.2)
- Requires-Dist: boto3 (==1.34.129)
- Requires-Dist: cachetools-async (>=0.0.5,<0.0.6)
- Requires-Dist: click (==8.1.7)
- Requires-Dist: copier (>=9.8.0,<10.0.0)
- Requires-Dist: dulwich (>=0.22.7,<0.23.0)
- Requires-Dist: email-validator (>=2.1,<2.2)
- Requires-Dist: fast-depends (>=2.4.12,<3.0.0)
- Requires-Dist: fastapi (==0.121.1)
- Requires-Dist: fastapi-storages (>=0.3,<0.4)
- Requires-Dist: gitpython (>=3,<4)
- Requires-Dist: graphene (>=3.4,<3.5)
- Requires-Dist: gunicorn (>=23.0.0,<24.0.0)
- Requires-Dist: lunr (>=0.7.0.post1,<0.8.0)
- Requires-Dist: nats-py (>=2.7.2,<3.0.0)
- Requires-Dist: neo4j (>=5.28,<5.29)
- Requires-Dist: neo4j-rust-ext (>=5.28,<5.29)
- Requires-Dist: netaddr (==1.3.0)
- Requires-Dist: netutils (==1.12.0)
- Requires-Dist: numpy (>=1.26.2,<2.0.0)
- Requires-Dist: opentelemetry-exporter-otlp-proto-grpc (==1.28.1)
- Requires-Dist: opentelemetry-exporter-otlp-proto-http (==1.28.1)
- Requires-Dist: opentelemetry-instrumentation-aio-pika (==0.49b1)
- Requires-Dist: opentelemetry-instrumentation-fastapi (==0.49b1)
- Requires-Dist: prefect (==3.4.23)
- Requires-Dist: prefect-redis (==0.2.5)
- Requires-Dist: pyarrow (>=14)
- Requires-Dist: pydantic (>=2.10,<2.11)
- Requires-Dist: pydantic-settings (>=2.8,<2.9)
- Requires-Dist: pyjwt (>=2.8,<2.9)
- Requires-Dist: pytest (>=7.4,<7.5)
- Requires-Dist: python-multipart (==0.0.18)
- Requires-Dist: pyyaml (>=6,<7)
- Requires-Dist: redis[hiredis] (>=6.0.0,<7.0.0)
- Requires-Dist: rich (>=13,<14)
- Requires-Dist: starlette-exporter (>=0.23,<0.24)
- Requires-Dist: structlog (==24.1.0)
- Requires-Dist: tomli (>=1.1.0) ; python_version < "3.11"
- Requires-Dist: typer (==0.12.5)
- Requires-Dist: ujson (>=5,<6)
- Requires-Dist: uvicorn[standard] (>=0.32,<0.33)
- Requires-Dist: whenever (==0.7.3)
- Project-URL: Documentation, https://docs.infrahub.app/
- Project-URL: Homepage, https://opsmill.com
- Project-URL: Repository, https://github.com/opsmill/infrahub
+ Requires-Python: <3.13,>=3.12
+ Requires-Dist: aio-pika<9.5,>=9.4
+ Requires-Dist: aiodataloader==0.4.0
+ Requires-Dist: ariadne-codegen==0.15.3
+ Requires-Dist: asgi-correlation-id==4.2.0
+ Requires-Dist: authlib==1.6.5
+ Requires-Dist: bcrypt<4.2,>=4.1
+ Requires-Dist: boto3==1.34.129
+ Requires-Dist: cachetools-async==0.0.5
+ Requires-Dist: click==8.1.7
+ Requires-Dist: copier==9.8.0
+ Requires-Dist: deepdiff==8.6.1
+ Requires-Dist: dulwich==0.22.7
+ Requires-Dist: email-validator<2.2,>=2.1
+ Requires-Dist: fast-depends==2.4.12
+ Requires-Dist: fastapi-storages<0.4,>=0.3
+ Requires-Dist: fastapi==0.121.1
+ Requires-Dist: gitpython<4,>=3
+ Requires-Dist: graphene<3.5,>=3.4
+ Requires-Dist: gunicorn<24,>=23.0.0
+ Requires-Dist: jinja2<4,>=3
+ Requires-Dist: lunr<0.8,>=0.7.0.post1
+ Requires-Dist: nats-py==2.7.2
+ Requires-Dist: neo4j-rust-ext<5.29,>=5.28
+ Requires-Dist: neo4j<5.29,>=5.28
+ Requires-Dist: netaddr==1.3.0
+ Requires-Dist: netutils==1.12.0
+ Requires-Dist: numpy==1.26.2
+ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc==1.28.1
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http==1.28.1
+ Requires-Dist: opentelemetry-instrumentation-aio-pika==0.49b1
+ Requires-Dist: opentelemetry-instrumentation-fastapi==0.49b1
+ Requires-Dist: prefect-redis==0.2.6
+ Requires-Dist: prefect==3.5.0
+ Requires-Dist: pyarrow<15,>=14
+ Requires-Dist: pydantic-settings<2.9,>=2.8
+ Requires-Dist: pydantic<2.11,>=2.10
+ Requires-Dist: pyjwt<2.9,>=2.8
+ Requires-Dist: pytest<7.5,>=7.4
+ Requires-Dist: python-multipart==0.0.18
+ Requires-Dist: pyyaml<7,>=6
+ Requires-Dist: redis[hiredis]==6.0.0
+ Requires-Dist: rich<14,>=13
+ Requires-Dist: starlette-exporter<0.24,>=0.23
+ Requires-Dist: structlog==24.1.0
+ Requires-Dist: tomli>=1.1.0; python_version <= '3.11'
+ Requires-Dist: typer==0.19.2
+ Requires-Dist: ujson<6,>=5
+ Requires-Dist: uvicorn[standard]<0.33,>=0.32
+ Requires-Dist: whenever==0.7.3
  Description-Content-Type: text/markdown

  <h1 align="center">
@@ -137,4 +137,3 @@ To help our community with the creation of contributions, please view our [CONTR
  ## Security

  [View our SECURITY](https://github.com/opsmill/infrahub?tab=security-ov-file) policy to find the latest information.
-