infrahub-server 1.7.0b0__py3-none-any.whl → 1.7.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (177)
  1. infrahub/api/exceptions.py +2 -2
  2. infrahub/api/schema.py +5 -0
  3. infrahub/cli/db.py +54 -24
  4. infrahub/core/account.py +12 -9
  5. infrahub/core/branch/models.py +11 -117
  6. infrahub/core/branch/tasks.py +7 -3
  7. infrahub/core/diff/branch_differ.py +1 -1
  8. infrahub/core/diff/conflict_transferer.py +1 -1
  9. infrahub/core/diff/data_check_synchronizer.py +1 -1
  10. infrahub/core/diff/enricher/cardinality_one.py +1 -1
  11. infrahub/core/diff/enricher/hierarchy.py +1 -1
  12. infrahub/core/diff/enricher/labels.py +1 -1
  13. infrahub/core/diff/merger/merger.py +6 -2
  14. infrahub/core/diff/repository/repository.py +3 -1
  15. infrahub/core/graph/__init__.py +1 -1
  16. infrahub/core/graph/constraints.py +1 -1
  17. infrahub/core/initialization.py +2 -1
  18. infrahub/core/ipam/reconciler.py +8 -6
  19. infrahub/core/ipam/utilization.py +8 -15
  20. infrahub/core/manager.py +1 -26
  21. infrahub/core/merge.py +1 -1
  22. infrahub/core/migrations/graph/__init__.py +2 -0
  23. infrahub/core/migrations/graph/m012_convert_account_generic.py +12 -12
  24. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +4 -4
  25. infrahub/core/migrations/graph/m014_remove_index_attr_value.py +3 -2
  26. infrahub/core/migrations/graph/m015_diff_format_update.py +3 -2
  27. infrahub/core/migrations/graph/m016_diff_delete_bug_fix.py +3 -2
  28. infrahub/core/migrations/graph/m017_add_core_profile.py +6 -4
  29. infrahub/core/migrations/graph/m018_uniqueness_nulls.py +3 -4
  30. infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -3
  31. infrahub/core/migrations/graph/m025_uniqueness_nulls.py +3 -4
  32. infrahub/core/migrations/graph/m026_0000_prefix_fix.py +4 -5
  33. infrahub/core/migrations/graph/m028_delete_diffs.py +3 -2
  34. infrahub/core/migrations/graph/m029_duplicates_cleanup.py +3 -2
  35. infrahub/core/migrations/graph/m031_check_number_attributes.py +4 -3
  36. infrahub/core/migrations/graph/m032_cleanup_orphaned_branch_relationships.py +3 -2
  37. infrahub/core/migrations/graph/m034_find_orphaned_schema_fields.py +3 -2
  38. infrahub/core/migrations/graph/m035_orphan_relationships.py +3 -3
  39. infrahub/core/migrations/graph/m036_drop_attr_value_index.py +3 -2
  40. infrahub/core/migrations/graph/m037_index_attr_vals.py +3 -2
  41. infrahub/core/migrations/graph/m038_redo_0000_prefix_fix.py +4 -5
  42. infrahub/core/migrations/graph/m039_ipam_reconcile.py +3 -2
  43. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +4 -3
  44. infrahub/core/migrations/graph/m042_profile_attrs_in_db.py +5 -4
  45. infrahub/core/migrations/graph/m043_create_hfid_display_label_in_db.py +12 -5
  46. infrahub/core/migrations/graph/m044_backfill_hfid_display_label_in_db.py +15 -4
  47. infrahub/core/migrations/graph/m045_backfill_hfid_display_label_in_db_profile_template.py +10 -4
  48. infrahub/core/migrations/graph/m046_fill_agnostic_hfid_display_labels.py +6 -5
  49. infrahub/core/migrations/graph/m047_backfill_or_null_display_label.py +19 -5
  50. infrahub/core/migrations/graph/m048_undelete_rel_props.py +6 -4
  51. infrahub/core/migrations/graph/m049_remove_is_visible_relationship.py +19 -4
  52. infrahub/core/migrations/graph/m050_backfill_vertex_metadata.py +3 -3
  53. infrahub/core/migrations/graph/m051_subtract_branched_from_microsecond.py +39 -0
  54. infrahub/core/migrations/query/__init__.py +2 -2
  55. infrahub/core/migrations/query/schema_attribute_update.py +1 -1
  56. infrahub/core/migrations/runner.py +6 -3
  57. infrahub/core/migrations/schema/attribute_kind_update.py +8 -11
  58. infrahub/core/migrations/schema/attribute_name_update.py +1 -1
  59. infrahub/core/migrations/schema/attribute_supports_profile.py +5 -10
  60. infrahub/core/migrations/schema/models.py +8 -0
  61. infrahub/core/migrations/schema/node_attribute_add.py +11 -14
  62. infrahub/core/migrations/schema/node_attribute_remove.py +1 -1
  63. infrahub/core/migrations/schema/node_kind_update.py +1 -1
  64. infrahub/core/migrations/schema/tasks.py +7 -1
  65. infrahub/core/migrations/shared.py +37 -30
  66. infrahub/core/node/__init__.py +3 -2
  67. infrahub/core/node/base.py +9 -5
  68. infrahub/core/node/delete_validator.py +1 -1
  69. infrahub/core/order.py +30 -0
  70. infrahub/core/protocols.py +1 -0
  71. infrahub/core/protocols_base.py +4 -0
  72. infrahub/core/query/__init__.py +8 -5
  73. infrahub/core/query/attribute.py +3 -3
  74. infrahub/core/query/branch.py +1 -1
  75. infrahub/core/query/delete.py +1 -1
  76. infrahub/core/query/diff.py +3 -3
  77. infrahub/core/query/ipam.py +104 -43
  78. infrahub/core/query/node.py +454 -101
  79. infrahub/core/query/relationship.py +83 -26
  80. infrahub/core/query/resource_manager.py +107 -18
  81. infrahub/core/relationship/constraints/count.py +1 -1
  82. infrahub/core/relationship/constraints/peer_kind.py +1 -1
  83. infrahub/core/relationship/constraints/peer_parent.py +1 -1
  84. infrahub/core/relationship/constraints/peer_relatives.py +1 -1
  85. infrahub/core/relationship/constraints/profiles_kind.py +1 -1
  86. infrahub/core/relationship/constraints/profiles_removal.py +1 -1
  87. infrahub/core/relationship/model.py +8 -2
  88. infrahub/core/schema/attribute_parameters.py +28 -1
  89. infrahub/core/schema/attribute_schema.py +9 -15
  90. infrahub/core/schema/basenode_schema.py +3 -0
  91. infrahub/core/schema/definitions/core/__init__.py +8 -2
  92. infrahub/core/schema/definitions/core/account.py +10 -10
  93. infrahub/core/schema/definitions/core/artifact.py +14 -8
  94. infrahub/core/schema/definitions/core/check.py +10 -4
  95. infrahub/core/schema/definitions/core/generator.py +26 -6
  96. infrahub/core/schema/definitions/core/graphql_query.py +1 -1
  97. infrahub/core/schema/definitions/core/group.py +9 -2
  98. infrahub/core/schema/definitions/core/ipam.py +80 -10
  99. infrahub/core/schema/definitions/core/menu.py +41 -7
  100. infrahub/core/schema/definitions/core/permission.py +16 -2
  101. infrahub/core/schema/definitions/core/profile.py +16 -2
  102. infrahub/core/schema/definitions/core/propose_change.py +24 -4
  103. infrahub/core/schema/definitions/core/propose_change_comment.py +23 -11
  104. infrahub/core/schema/definitions/core/propose_change_validator.py +50 -21
  105. infrahub/core/schema/definitions/core/repository.py +10 -0
  106. infrahub/core/schema/definitions/core/resource_pool.py +8 -1
  107. infrahub/core/schema/definitions/core/template.py +19 -2
  108. infrahub/core/schema/definitions/core/transform.py +11 -5
  109. infrahub/core/schema/definitions/core/webhook.py +27 -9
  110. infrahub/core/schema/manager.py +50 -38
  111. infrahub/core/schema/schema_branch.py +68 -2
  112. infrahub/core/utils.py +3 -3
  113. infrahub/core/validators/aggregated_checker.py +1 -1
  114. infrahub/core/validators/attribute/choices.py +1 -1
  115. infrahub/core/validators/attribute/enum.py +1 -1
  116. infrahub/core/validators/attribute/kind.py +6 -3
  117. infrahub/core/validators/attribute/length.py +1 -1
  118. infrahub/core/validators/attribute/min_max.py +1 -1
  119. infrahub/core/validators/attribute/number_pool.py +1 -1
  120. infrahub/core/validators/attribute/optional.py +1 -1
  121. infrahub/core/validators/attribute/regex.py +1 -1
  122. infrahub/core/validators/node/attribute.py +1 -1
  123. infrahub/core/validators/node/relationship.py +1 -1
  124. infrahub/core/validators/relationship/peer.py +1 -1
  125. infrahub/database/__init__.py +1 -1
  126. infrahub/git/utils.py +1 -1
  127. infrahub/graphql/app.py +2 -2
  128. infrahub/graphql/field_extractor.py +1 -1
  129. infrahub/graphql/manager.py +17 -3
  130. infrahub/graphql/mutations/account.py +1 -1
  131. infrahub/graphql/order.py +14 -0
  132. infrahub/graphql/queries/diff/tree.py +5 -5
  133. infrahub/graphql/queries/resource_manager.py +25 -24
  134. infrahub/graphql/resolvers/ipam.py +3 -3
  135. infrahub/graphql/resolvers/resolver.py +44 -3
  136. infrahub/graphql/types/standard_node.py +8 -4
  137. infrahub/lock.py +7 -0
  138. infrahub/menu/repository.py +1 -1
  139. infrahub/patch/queries/base.py +1 -1
  140. infrahub/pools/number.py +1 -8
  141. infrahub/profiles/node_applier.py +1 -1
  142. infrahub/profiles/queries/get_profile_data.py +1 -1
  143. infrahub/proposed_change/action_checker.py +1 -1
  144. infrahub/services/__init__.py +1 -1
  145. infrahub/services/adapters/cache/nats.py +1 -1
  146. infrahub/services/adapters/cache/redis.py +7 -0
  147. infrahub/webhook/gather.py +1 -1
  148. infrahub/webhook/tasks.py +22 -6
  149. infrahub_sdk/analyzer.py +2 -2
  150. infrahub_sdk/branch.py +12 -39
  151. infrahub_sdk/checks.py +4 -4
  152. infrahub_sdk/client.py +36 -0
  153. infrahub_sdk/ctl/cli_commands.py +2 -1
  154. infrahub_sdk/ctl/graphql.py +15 -4
  155. infrahub_sdk/ctl/utils.py +2 -2
  156. infrahub_sdk/enums.py +6 -0
  157. infrahub_sdk/graphql/renderers.py +21 -0
  158. infrahub_sdk/graphql/utils.py +85 -0
  159. infrahub_sdk/node/attribute.py +12 -2
  160. infrahub_sdk/node/constants.py +11 -0
  161. infrahub_sdk/node/metadata.py +69 -0
  162. infrahub_sdk/node/node.py +65 -14
  163. infrahub_sdk/node/property.py +3 -0
  164. infrahub_sdk/node/related_node.py +24 -1
  165. infrahub_sdk/node/relationship.py +10 -1
  166. infrahub_sdk/operation.py +2 -2
  167. infrahub_sdk/schema/repository.py +1 -2
  168. infrahub_sdk/transforms.py +2 -2
  169. infrahub_sdk/types.py +18 -2
  170. {infrahub_server-1.7.0b0.dist-info → infrahub_server-1.7.1.dist-info}/METADATA +6 -6
  171. {infrahub_server-1.7.0b0.dist-info → infrahub_server-1.7.1.dist-info}/RECORD +176 -172
  172. {infrahub_server-1.7.0b0.dist-info → infrahub_server-1.7.1.dist-info}/entry_points.txt +0 -1
  173. infrahub_testcontainers/models.py +3 -3
  174. infrahub_testcontainers/performance_test.py +1 -1
  175. infrahub/graphql/models.py +0 -36
  176. {infrahub_server-1.7.0b0.dist-info → infrahub_server-1.7.1.dist-info}/WHEEL +0 -0
  177. {infrahub_server-1.7.0b0.dist-info → infrahub_server-1.7.1.dist-info}/licenses/LICENSE.txt +0 -0
@@ -598,7 +598,10 @@ class GraphQLSchemaManager:
  required=False,
  description="Human friendly identifier",
  ),
- "_updated_at": graphene.DateTime(required=False),
+ "_updated_at": graphene.DateTime(
+ required=False,
+ deprecation_reason="Query the node_metadata field instead. Will be removed in Infrahub 1.9",
+ ),
  "display_label": graphene.String(required=False),
  "Meta": type("Meta", (object,), meta_attrs),
  }
@@ -1005,8 +1008,16 @@ class GraphQLSchemaManager:
  if not top_level:
  filters["isnull"] = graphene.Boolean()

+ if schema.display_label:
+ display_label_schema = schema.get_attribute("display_label")
+ filters.update(
+ get_attribute_type(kind=display_label_schema.kind).get_graphql_filters(
+ name="display_label", include_properties=False, include_isnull=True
+ )
+ )
+
  if schema.human_friendly_id and top_level:
- # HFID filter limited to top level because we can't filter on HFID for relationships (yet)
+ # NOTE: this can loosen to allow filtering at a non-top level once IFC-2110 is implemented
  filters["hfid"] = graphene.List(graphene.String)

  for attr in schema.attributes:
@@ -1201,7 +1212,10 @@ class GraphQLSchemaManager:

  main_attrs: dict[str, Any] = {
  "node": graphene.Field(base_interface, required=False),
- "_updated_at": graphene.DateTime(required=False),
+ "_updated_at": graphene.DateTime(
+ required=False,
+ deprecation_reason="Query the node_metadata field instead. Will be removed in Infrahub 1.9",
+ ),
  "node_metadata": graphene.Field(node_metadata, required=True),
  "Meta": type("Meta", (object,), meta_attrs),
  }
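
The two hunks above deprecate the top-level `_updated_at` field in favor of `node_metadata` and expose `display_label` filters. A minimal client-side sketch of the migration, executed through the SDK; the node kind `BuiltinTag` and the exact sub-fields of `node_metadata` are assumptions for illustration, not taken from this diff:

from infrahub_sdk import InfrahubClient

QUERY = """
query {
  BuiltinTag {
    edges {
      node {
        id
        display_label
        node_metadata { updated_at }  # replaces the deprecated _updated_at
      }
    }
  }
}
"""

async def run() -> None:
    client = InfrahubClient(address="http://localhost:8000")
    data = await client.execute_graphql(query=QUERY)
    print(data)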
@@ -9,13 +9,13 @@ from infrahub.auth import AuthType
  from infrahub.core.constants import InfrahubKind
  from infrahub.core.manager import NodeManager
  from infrahub.core.node import Node
+ from infrahub.core.order import OrderModel
  from infrahub.core.protocols import CoreAccount, CoreNode, InternalAccountToken
  from infrahub.core.timestamp import Timestamp
  from infrahub.database import InfrahubDatabase, retry_db_transaction
  from infrahub.exceptions import NodeNotFoundError, PermissionDeniedError
  from infrahub.graphql.field_extractor import extract_graphql_fields

- from ..models import OrderModel
  from ..types import InfrahubObjectType

  if TYPE_CHECKING:
@@ -0,0 +1,14 @@
+ from __future__ import annotations
+
+ from typing import Any
+
+ from infrahub.core.order import OrderModel
+
+
+ def deserialize_order_input(input_data: dict[str, Any] | None) -> OrderModel | None:
+ # Corresponds to infrahub.graphql.manager.OrderInput
+ if not input_data:
+ return None
+
+ order_model = OrderModel(**input_data)
+ return order_model
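
A small usage sketch for the new helper (the new file is infrahub/graphql/order.py per the file list). Only the behaviour visible in the hunk above is exercised, since the fields of `OrderModel` live in infrahub/core/order.py and are not shown in this diff:

from infrahub.graphql.order import deserialize_order_input

# Falsy input (None or an empty dict) yields no OrderModel, per the helper above.
assert deserialize_order_input(None) is None
assert deserialize_order_input({}) is None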
@@ -60,10 +60,10 @@ class ConflictDetails(ObjectType):


  class DiffSummaryCounts(ObjectType):
- num_added = Int(required=False)
- num_updated = Int(required=False)
- num_removed = Int(required=False)
- num_conflicts = Int(required=False)
+ num_added = Int(required=True)
+ num_updated = Int(required=True)
+ num_removed = Int(required=True)
+ num_conflicts = Int(required=True)


  class DiffProperty(ObjectType):
@@ -146,7 +146,7 @@ class DiffTreeSummary(DiffSummaryCounts):
  diff_branch = String(required=True)
  from_time = DateTime(required=True)
  to_time = DateTime(required=True)
- num_unchanged = Int(required=False)
+ num_unchanged = Int(required=True)
  num_untracked_base_changes = Int(required=False)
  num_untracked_diff_changes = Int(required=False)

@@ -1,7 +1,7 @@
  from __future__ import annotations

  import contextlib
- from typing import TYPE_CHECKING, Any
+ from typing import TYPE_CHECKING, Any, cast

  from graphene import BigInt, Field, Float, Int, List, NonNull, ObjectType, String

@@ -129,18 +129,17 @@ class PoolAllocated(ObjectType):

  node_fields = edges.get("node", {})

- nodes = []
- for result in query.get_results():
- child_node = result.get_node("child")
- child_value_node = result.get_node("av")
- node_id = str(child_node.get("uuid"))
-
- child_ip_value = child_value_node.get("value")
- kind = child_node.get("kind")
- branch_name = str(result.get("branch"))
-
+ nodes: list[dict[str, dict[str, str | None]]] = []
+ for item in query.get_data():
  nodes.append(
- {"node": {"id": node_id, "kind": kind, "branch": branch_name, "display_label": child_ip_value}}
+ {
+ "node": {
+ "id": item.child_uuid,
+ "kind": item.child_kind,
+ "branch": item.branch,
+ "display_label": item.ip_value,
+ }
+ }
  )

  if "identifier" in node_fields:
@@ -152,19 +151,21 @@ class PoolAllocated(ObjectType):
  identifier_query_class = identifier_query_map.get(pool.get_kind())
  if not identifier_query_class:
  raise ValidationError(input_value=f"This query doesn't get support {pool.get_kind()}")
- identifier_query = await identifier_query_class.init(
- db=graphql_context.db, at=graphql_context.at, pool_id=pool_id, allocated=allocated_ids
+ identifier_query = cast(
+ "IPAddressPoolGetIdentifiers | PrefixPoolGetIdentifiers",
+ await identifier_query_class.init(
+ db=graphql_context.db, at=graphql_context.at, pool_id=pool_id, allocated=allocated_ids
+ ),
  )
  await identifier_query.execute(db=graphql_context.db)

- reservations = {}
- for result in identifier_query.get_results():
- reservation = result.get_rel("reservation")
- allocated = result.get_node("allocated")
- reservations[allocated.get("uuid")] = reservation.get("identifier")
+ reservations: dict[str, str] = {}
+ for identifier_item in identifier_query.get_data():
+ reservations[identifier_item.allocated_uuid] = identifier_item.identifier

  for node in nodes:
- node["node"]["identifier"] = reservations.get(node["node"]["id"])
+ node_id = cast("str", node["node"]["id"])
+ node["node"]["identifier"] = reservations.get(node_id)

  response["edges"] = nodes

@@ -284,13 +285,13 @@ async def resolve_number_pool_allocation(
  if "edges" in fields:
  await query.execute(db=db)
  edges = []
- for entry in query.results:
+ for item in query.get_data():
  node = {
  "node": {
- "id": entry.get_as_optional_type("id", str),
+ "id": item.id,
  "kind": pool.node.value, # type: ignore[attr-defined]
- "branch": entry.get_as_optional_type("branch", str),
- "display_label": entry.get_as_optional_type("value", int),
+ "branch": item.branch,
+ "display_label": item.value,
  }
  }
  edges.append(node)
@@ -16,10 +16,11 @@ from infrahub.core.node import Node
  from infrahub.core.protocols import BuiltinIPNamespace, BuiltinIPPrefix
  from infrahub.core.schema.generic_schema import GenericSchema
  from infrahub.exceptions import ValidationError
- from infrahub.graphql.models import OrderModel
  from infrahub.graphql.parser import extract_selection
  from infrahub.graphql.permissions import get_permissions

+ from ..order import deserialize_order_input
+
  if TYPE_CHECKING:
  from collections.abc import Sequence

@@ -322,7 +323,7 @@ async def ipam_paginated_list_resolver( # noqa: PLR0915
  if not isinstance(schema, GenericSchema) or schema.kind not in [InfrahubKind.IPADDRESS, InfrahubKind.IPPREFIX]:
  raise ValidationError(f"{schema.kind} is not {InfrahubKind.IPADDRESS} or {InfrahubKind.IPPREFIX}")

- order_model = OrderModel.from_input(input_data=order)
+ order_model = deserialize_order_input(input_data=order)
  fields = await extract_selection(info=info, schema=schema)
  resolve_available = bool(kwargs.pop("include_available", False))
  kinds_to_filter: list[str] = kwargs.pop("kinds", []) # type: ignore[assignment]
@@ -398,7 +399,6 @@ async def ipam_paginated_list_resolver( # noqa: PLR0915
  branch=graphql_context.branch,
  limit=query_limit,
  offset=offset,
- account=graphql_context.account_session,
  include_metadata=MetadataOptions.LINKED_NODES,
  partial_match=partial_match,
  order=order_model,
@@ -1,5 +1,6 @@
  from __future__ import annotations

+ from datetime import datetime, timedelta
  from typing import TYPE_CHECKING, Any

  from graphql.type.definition import GraphQLNonNull
@@ -7,11 +8,12 @@ from opentelemetry import trace

  from infrahub.core.constants import BranchSupportType, InfrahubKind, RelationshipHierarchyDirection
  from infrahub.core.manager import NodeManager
+ from infrahub.core.order import OrderModel
  from infrahub.exceptions import NodeNotFoundError
  from infrahub.graphql.field_extractor import extract_graphql_fields
  from infrahub.graphql.metadata import build_metadata_query_options

- from ..models import OrderModel
+ from ..order import deserialize_order_input
  from ..parser import extract_selection
  from ..permissions import get_permissions

@@ -140,6 +142,45 @@ async def parent_field_name_resolver(parent: dict[str, dict], info: GraphQLResol
  return parent[info.field_name]


+ def _transform_metadata_day_filters(filters: dict[str, Any]) -> dict[str, Any]:
+ """Transform metadata datetime filters with 00:00:00 time into day range filters.
+
+ When a filter like `node_metadata__created_at="2025-02-03T00:00:00"` has a time
+ of exactly midnight, transform it into __after and __before filters to match
+ the entire day (inclusive of midnight).
+
+ If __after or __before filters are already explicitly defined, they will not be
+ overwritten by the generated day range filters.
+ """
+ result = dict(filters)
+ metadata_datetime_fields = ("node_metadata__created_at", "node_metadata__updated_at")
+
+ for field in metadata_datetime_fields:
+ if field not in result:
+ continue
+ value = result[field]
+ if not isinstance(value, datetime):
+ continue
+ # Check if time is midnight (00:00:00)
+ if value.hour == 0 and value.minute == 0 and value.second == 0 and value.microsecond == 0:
+ # Remove the exact match filter
+ del result[field]
+ # Add __after filter with one microsecond before midnight to include objects at exactly midnight
+ # Skip if __after is already explicitly defined
+ after_key = f"{field}__after"
+ if after_key not in result:
+ one_microsecond_before = value - timedelta(microseconds=1)
+ result[after_key] = one_microsecond_before
+ # Add __before filter with next day (exclusive: <)
+ # Skip if __before is already explicitly defined
+ before_key = f"{field}__before"
+ if before_key not in result:
+ next_day = value + timedelta(days=1)
+ result[before_key] = next_day
+
+ return result
+
+
  @trace.get_tracer(__name__).start_as_current_span("default_paginated_list_resolver")
  async def default_paginated_list_resolver(
  root: dict, # noqa: ARG001
@@ -156,7 +197,7 @@ async def default_paginated_list_resolver(
  else info.return_type.graphene_type._meta.schema
  )

- order_model = OrderModel.from_input(input_data=order)
+ order_model = deserialize_order_input(input_data=order)

  fields = await extract_selection(info=info, schema=schema)

@@ -166,6 +207,7 @@ async def default_paginated_list_resolver(
  filters = {
  key: value for key, value in kwargs.items() if ("__" in key and value is not None) or key in ("ids", "hfid")
  }
+ filters = _transform_metadata_day_filters(filters)

  edges: dict[str, Any] = fields.get("edges", {})
  node_fields = edges.get("node", {})
@@ -203,7 +245,6 @@ async def default_paginated_list_resolver(
  branch=graphql_context.branch,
  limit=limit,
  offset=offset,
- account=graphql_context.account_session,
  partial_match=partial_match,
  order=order_model,
  )
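
A standalone sketch of the day-range expansion performed by `_transform_metadata_day_filters` above: an exact-midnight `node_metadata__created_at` value is replaced by an `__after`/`__before` pair covering the whole day. The datetime value here is an arbitrary example:

from datetime import datetime, timedelta

# An exact-midnight filter value, as a client might send it.
value = datetime(2025, 2, 3)

# Equivalent day-range filters produced by the transformation above:
# __after is one microsecond before midnight so objects stamped exactly at
# midnight are included; __before is the next midnight.
expanded = {
    "node_metadata__created_at__after": value - timedelta(microseconds=1),
    "node_metadata__created_at__before": value + timedelta(days=1),
}
print(expanded)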
@@ -13,12 +13,18 @@ if TYPE_CHECKING:


  class InfrahubObjectTypeOptions(ObjectTypeOptions):
- model = None
+ model: type | None = None


  class InfrahubObjectType(ObjectType):
  @classmethod
- def __init_subclass_with_meta__(cls, model=None, interfaces=(), _meta=None, **options) -> None:
+ def __init_subclass_with_meta__(
+ cls,
+ model: type | None = None,
+ interfaces: tuple[type, ...] = (),
+ _meta: InfrahubObjectTypeOptions | None = None,
+ **options: Any,
+ ) -> None:
  if not _meta:
  _meta = InfrahubObjectTypeOptions(cls)

@@ -38,14 +44,12 @@ class InfrahubObjectType(ObjectType):
  filters=filters,
  at=graphql_context.at,
  branch=graphql_context.branch,
- account=graphql_context.account_session,
  db=db,
  )
  else:
  objs = await cls._meta.model.get_list(
  at=graphql_context.at,
  branch=graphql_context.branch,
- account=graphql_context.account_session,
  db=db,
  )

infrahub/lock.py CHANGED
@@ -10,6 +10,7 @@ from typing import TYPE_CHECKING

  import redis.asyncio as redis
  from prometheus_client import Histogram
+ from redis import UsernamePasswordCredentialProvider
  from redis.asyncio.lock import Lock as GlobalLock

  from infrahub import config
@@ -275,10 +276,16 @@ class InfrahubLockRegistry:
  ) -> None:
  if config.SETTINGS.cache.enable and not local_only:
  if config.SETTINGS.cache.driver == config.CacheDriver.Redis:
+ credential_provider: UsernamePasswordCredentialProvider | None = None
+ if config.SETTINGS.cache.username and config.SETTINGS.cache.password:
+ credential_provider = UsernamePasswordCredentialProvider(
+ username=config.SETTINGS.cache.username, password=config.SETTINGS.cache.password
+ )
  self.connection = redis.Redis(
  host=config.SETTINGS.cache.address,
  port=config.SETTINGS.cache.service_port,
  db=config.SETTINGS.cache.database,
+ credential_provider=credential_provider,
  ssl=config.SETTINGS.cache.tls_enabled,
  ssl_cert_reqs="optional" if not config.SETTINGS.cache.tls_insecure else "none",
  ssl_check_hostname=not config.SETTINGS.cache.tls_insecure,
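
Stripped of Infrahub's settings object, the credential wiring added above (and repeated in the Redis cache adapter further down) reduces to the following redis-py pattern; the host, port, and credentials here are placeholders:

import redis.asyncio as redis
from redis import UsernamePasswordCredentialProvider

# Placeholder values standing in for config.SETTINGS.cache.* in the hunk above.
credential_provider = UsernamePasswordCredentialProvider(username="infrahub", password="change-me")
connection = redis.Redis(
    host="localhost",
    port=6379,
    db=0,
    credential_provider=credential_provider,
)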
@@ -6,7 +6,7 @@ from .models import MenuDict, MenuItemDefinition, MenuItemDict


  class MenuRepository:
- def __init__(self, db: InfrahubDatabase):
+ def __init__(self, db: InfrahubDatabase) -> None:
  self.db = db

  async def get_menu(self, nodes: dict[str, CoreMenuItem] | None = None) -> MenuDict:
@@ -6,7 +6,7 @@ from ..models import PatchPlan


  class PatchQuery(ABC):
- def __init__(self, db: InfrahubDatabase):
+ def __init__(self, db: InfrahubDatabase) -> None:
  self.db = db

  @abstractmethod
infrahub/pools/number.py CHANGED
@@ -37,14 +37,7 @@ class NumberUtilizationGetter:
  query = await NumberPoolGetAllocated.init(db=self.db, pool=self.pool, branch=self.branch, branch_agnostic=True)
  await query.execute(db=self.db)

- self.used = [
- UsedNumber(
- number=result.get_as_type(label="value", return_type=int),
- branch=result.get_as_type(label="branch", return_type=str),
- )
- for result in query.results
- if result.get_as_optional_type(label="value", return_type=int) is not None
- ]
+ self.used = [UsedNumber(number=item.value, branch=item.branch) for item in query.get_data()]

  self.used_default_branch = {entry.number for entry in self.used if entry.branch == registry.default_branch}
  used_branches = {entry.number for entry in self.used if entry.branch != registry.default_branch}
@@ -21,7 +21,7 @@ class NodeProfilesApplier:
  3. Profile priority determines which profile wins when multiple profiles set the same attribute
  """

- def __init__(self, db: InfrahubDatabase, branch: Branch):
+ def __init__(self, db: InfrahubDatabase, branch: Branch) -> None:
  self.db = db
  self.branch = branch

@@ -34,7 +34,7 @@ class GetProfileDataQuery(Query):
  attr_names: list[str],
  relationship_filters: list[RelationshipFilter] | None = None,
  **kwargs: Any,
- ):
+ ) -> None:
  super().__init__(*args, **kwargs)
  self.profile_ids = profile_ids
  self.attr_names = attr_names
@@ -125,7 +125,7 @@ class ActionRule:


  class ActionRulesEvaluator:
- def __init__(self, rules: list[ActionRule]):
+ def __init__(self, rules: list[ActionRule]) -> None:
  self.rules = rules

  async def evaluate(
@@ -54,7 +54,7 @@ class InfrahubServices:
  message_bus: InfrahubMessageBus | None = None,
  workflow: InfrahubWorkflow | None = None,
  component: InfrahubComponent | None = None,
- ):
+ ) -> None:
  """
  This method should not be called directly, use `new` instead for a proper initialization.
  """
@@ -23,7 +23,7 @@ class NATSCache(InfrahubCache):
  jetstream: nats.js.JetStreamContext,
  kv: dict[int, nats.js.kv.KeyValue],
  kv_buckets: dict[str, KVTTL],
- ):
+ ) -> None:
  self.connection = connection
  self.jetstream = jetstream
  self.kv = kv
@@ -3,6 +3,7 @@ from __future__ import annotations
  from typing import TYPE_CHECKING

  import redis.asyncio as redis
+ from redis import UsernamePasswordCredentialProvider

  from infrahub import config
  from infrahub.services.adapters.cache import InfrahubCache
@@ -13,10 +14,16 @@ if TYPE_CHECKING:

  class RedisCache(InfrahubCache):
  def __init__(self) -> None:
+ credential_provider: UsernamePasswordCredentialProvider | None = None
+ if config.SETTINGS.cache.username and config.SETTINGS.cache.password:
+ credential_provider = UsernamePasswordCredentialProvider(
+ username=config.SETTINGS.cache.username, password=config.SETTINGS.cache.password
+ )
  self.connection = redis.Redis(
  host=config.SETTINGS.cache.address,
  port=config.SETTINGS.cache.service_port,
  db=config.SETTINGS.cache.database,
+ credential_provider=credential_provider,
  ssl=config.SETTINGS.cache.tls_enabled,
  ssl_cert_reqs="optional" if not config.SETTINGS.cache.tls_insecure else "none",
  ssl_check_hostname=not config.SETTINGS.cache.tls_insecure,
@@ -13,5 +13,5 @@ from .models import WebhookTriggerDefinition
  @task(name="gather-trigger-webhook", task_run_name="Gather webhook triggers", cache_policy=NONE)
  async def gather_trigger_webhook(db: InfrahubDatabase) -> list[WebhookTriggerDefinition]:
  webhooks = await NodeManager.query(db=db, schema=CoreWebhook)
- triggers = [WebhookTriggerDefinition.from_object(webhook) for webhook in webhooks]
+ triggers = [WebhookTriggerDefinition.from_object(webhook) for webhook in webhooks if webhook.active.value]
  return triggers
infrahub/webhook/tasks.py CHANGED
@@ -13,7 +13,7 @@ from prefect.logging import get_run_logger

  from infrahub.message_bus.types import KVTTL
  from infrahub.trigger.models import TriggerType
- from infrahub.trigger.setup import setup_triggers_specific
+ from infrahub.trigger.setup import gather_all_automations, setup_triggers_specific
  from infrahub.workers.dependencies import get_cache, get_client, get_database, get_http
  from infrahub.workflows.utils import add_tags

@@ -123,7 +123,7 @@ async def configure_webhook_all() -> None:

  @flow(name="webhook-setup-automation-one", flow_run_name="Configurate webhook for {webhook_name}")
  async def configure_webhook_one(
- webhook_name: str, # noqa: ARG001
+ webhook_name: str,
  event_data: dict,
  ) -> None:
  log = get_run_logger()
@@ -132,6 +132,24 @@ async def configure_webhook_one(
  trigger = WebhookTriggerDefinition.from_object(webhook)

  async with get_prefect_client(sync_client=False) as prefect_client:
+ all_automations = await gather_all_automations(client=prefect_client)
+ existing_automations = [
+ automation for automation in all_automations if automation.name == trigger.generate_name()
+ ]
+ existing_automation = existing_automations[0] if existing_automations else None
+
+ # If webhook is inactive, delete the automation if it exists
+ if not webhook.active.value:
+ if existing_automation:
+ await prefect_client.delete_automation(automation_id=existing_automation.id)
+ log.info(f"Automation {trigger.generate_name()} deleted (webhook disabled)")
+ else:
+ log.info(f"Webhook {webhook_name} is disabled, no automation to delete")
+
+ cache = await get_cache()
+ await cache.delete(key=f"webhook:{webhook.id}")
+ return
+
  # Query the deployment associated with the trigger to have its ID
  deployment_name = trigger.get_deployment_names()[0]
  deployment = await prefect_client.read_deployment_by_name(name=f"{deployment_name}/{deployment_name}")
@@ -144,9 +162,6 @@ async def configure_webhook_one(
  actions=[action.get(deployment.id) for action in trigger.actions],
  )

- existing_automations = await prefect_client.read_automations_by_name(trigger.generate_name())
- existing_automation = existing_automations[0] if existing_automations else None
-
  if existing_automation:
  await prefect_client.update_automation(automation_id=existing_automation.id, automation=automation)
  log.info(f"Automation {trigger.generate_name()} updated")
@@ -168,7 +183,8 @@ async def delete_webhook_automation(
  async with get_prefect_client(sync_client=False) as prefect_client:
  automation_name = WebhookTriggerDefinition.generate_name_from_id(id=webhook_id)

- existing_automations = await prefect_client.read_automations_by_name(automation_name)
+ all_automations = await gather_all_automations(client=prefect_client)
+ existing_automations = [automation for automation in all_automations if automation.name == automation_name]
  existing_automation = existing_automations[0] if existing_automations else None

  if existing_automation:
infrahub_sdk/analyzer.py CHANGED
@@ -30,10 +30,10 @@ class GraphQLOperation(BaseModel):


  class GraphQLQueryAnalyzer:
- def __init__(self, query: str, schema: GraphQLSchema | None = None) -> None:
+ def __init__(self, query: str, schema: GraphQLSchema | None = None, document: DocumentNode | None = None) -> None:
  self.query: str = query
  self.schema: GraphQLSchema | None = schema
- self.document: DocumentNode = parse(self.query)
+ self.document: DocumentNode = document or parse(self.query)
  self._fields: dict | None = None

  @property
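
The new optional `document` argument lets callers hand over an already parsed query instead of parsing it a second time. A minimal sketch, assuming an arbitrary query string:

from graphql import parse

from infrahub_sdk.analyzer import GraphQLQueryAnalyzer

query = "query { BuiltinTag { edges { node { id } } } }"
document = parse(query)

# The pre-parsed document is reused; parse() only runs when document is None.
analyzer = GraphQLQueryAnalyzer(query=query, document=document)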