infrahub-server 1.7.1__py3-none-any.whl → 1.7.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. infrahub/actions/gather.py +2 -2
  2. infrahub/api/query.py +3 -2
  3. infrahub/api/transformation.py +3 -3
  4. infrahub/computed_attribute/gather.py +2 -0
  5. infrahub/config.py +2 -2
  6. infrahub/core/attribute.py +21 -2
  7. infrahub/core/diff/model/path.py +43 -0
  8. infrahub/core/graph/__init__.py +1 -1
  9. infrahub/core/graph/index.py +2 -0
  10. infrahub/core/ipam/resource_allocator.py +229 -0
  11. infrahub/core/migrations/graph/__init__.py +8 -0
  12. infrahub/core/migrations/graph/m052_fix_global_branch_level.py +51 -0
  13. infrahub/core/migrations/graph/m053_fix_branch_level_zero.py +61 -0
  14. infrahub/core/migrations/graph/m054_cleanup_orphaned_nodes.py +87 -0
  15. infrahub/core/migrations/graph/m055_remove_webhook_validate_certificates_default.py +86 -0
  16. infrahub/core/migrations/schema/node_attribute_add.py +17 -19
  17. infrahub/core/node/lock_utils.py +23 -2
  18. infrahub/core/node/resource_manager/ip_address_pool.py +5 -11
  19. infrahub/core/node/resource_manager/ip_prefix_pool.py +5 -21
  20. infrahub/core/node/resource_manager/number_pool.py +109 -39
  21. infrahub/core/query/__init__.py +7 -1
  22. infrahub/core/query/branch.py +18 -2
  23. infrahub/core/query/ipam.py +629 -40
  24. infrahub/core/query/node.py +128 -0
  25. infrahub/core/query/resource_manager.py +114 -1
  26. infrahub/core/relationship/model.py +1 -1
  27. infrahub/core/schema/definitions/core/webhook.py +0 -1
  28. infrahub/core/schema/definitions/internal.py +8 -5
  29. infrahub/core/validators/determiner.py +4 -0
  30. infrahub/graphql/analyzer.py +3 -1
  31. infrahub/graphql/app.py +7 -10
  32. infrahub/graphql/execution.py +95 -0
  33. infrahub/graphql/mutations/proposed_change.py +15 -0
  34. infrahub/graphql/parser.py +10 -7
  35. infrahub/graphql/queries/ipam.py +20 -25
  36. infrahub/graphql/queries/search.py +29 -9
  37. infrahub/proposed_change/tasks.py +2 -0
  38. infrahub/services/adapters/http/httpx.py +27 -0
  39. infrahub/trigger/catalogue.py +2 -0
  40. infrahub/trigger/models.py +73 -4
  41. infrahub/trigger/setup.py +1 -1
  42. infrahub/trigger/system.py +36 -0
  43. infrahub/webhook/models.py +4 -2
  44. infrahub/webhook/tasks.py +2 -2
  45. infrahub/workflows/initialization.py +2 -2
  46. {infrahub_server-1.7.1.dist-info → infrahub_server-1.7.3.dist-info}/METADATA +3 -3
  47. {infrahub_server-1.7.1.dist-info → infrahub_server-1.7.3.dist-info}/RECORD +52 -46
  48. infrahub_testcontainers/docker-compose-cluster.test.yml +16 -10
  49. infrahub_testcontainers/docker-compose.test.yml +11 -10
  50. infrahub/pools/address.py +0 -16
  51. {infrahub_server-1.7.1.dist-info → infrahub_server-1.7.3.dist-info}/WHEEL +0 -0
  52. {infrahub_server-1.7.1.dist-info → infrahub_server-1.7.3.dist-info}/entry_points.txt +0 -0
  53. {infrahub_server-1.7.1.dist-info → infrahub_server-1.7.3.dist-info}/licenses/LICENSE.txt +0 -0
infrahub/core/query/node.py CHANGED
@@ -2180,6 +2180,134 @@ WITH %(tracked_vars)s,
          return [str(result.get("n.uuid")) for result in self.get_results()]


+ @dataclass(frozen=True)
+ class NodeGetListByAttributeValueQueryResult:
+     """Result from NodeGetListByAttributeValueQuery."""
+
+     uuid: str
+     kind: str
+
+
+ class NodeGetListByAttributeValueQuery(Query):
+     """Query to find nodes by searching attribute values.
+
+     This query is optimized for search operations by starting from the AttributeValueIndexed
+     nodes and using a TEXT index for efficient CONTAINS searches. This approach is more
+     efficient than the standard NodeGetListQuery when searching for values across all nodes.
+     """
+
+     name = "node_get_list_by_attribute_value"
+     type = QueryType.READ
+
+     def __init__(
+         self,
+         search_value: str,
+         kinds: list[str] | None = None,
+         partial_match: bool = True,
+         **kwargs: Any,
+     ) -> None:
+         self.search_value = search_value
+         self.kinds = kinds
+         self.partial_match = partial_match
+
+         super().__init__(**kwargs)
+
+     async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:  # noqa: ARG002
+         self.order_by = ["n.uuid"]
+         self.return_labels = ["DISTINCT n.uuid as uuid", "n.kind as kind"]
+
+         branch_filter, branch_params = self.branch.get_query_filter_path(
+             at=self.at, branch_agnostic=self.branch_agnostic
+         )
+         self.params.update(branch_params)
+
+         # Build search values for case-insensitive matching without using toLower/toString,
+         # which would disable index lookup. We search for four case variations:
+         # 1. Original (as provided), 2. lowercase, 3. UPPERCASE, 4. Title Case (first char upper, rest lower)
+         search_original = self.search_value
+         search_lower = self.search_value.lower()
+         search_upper = self.search_value.upper()
+         search_title = self.search_value.capitalize()
+
+         # Build the search predicate based on partial_match.
+         # We avoid toLower/toString to allow TEXT index usage.
+         if self.partial_match:
+             # Use CONTAINS with multiple case variations to leverage the TEXT index
+             search_predicate = (
+                 "(av.value CONTAINS $search_original OR av.value CONTAINS $search_lower "
+                 "OR av.value CONTAINS $search_upper OR av.value CONTAINS $search_title)"
+             )
+         else:
+             # Exact match with case variations
+             search_predicate = (
+                 "(av.value = $search_original OR av.value = $search_lower "
+                 "OR av.value = $search_upper OR av.value = $search_title)"
+             )
+
+         self.params["search_original"] = search_original
+         self.params["search_lower"] = search_lower
+         self.params["search_upper"] = search_upper
+         self.params["search_title"] = search_title
+
+         # Build kind filter if specified
+         kind_filter = ""
+         if self.kinds:
+             kind_filter = "AND any(l IN labels(n) WHERE l in $kinds)"
+             self.params["kinds"] = self.kinds
+
+         # The query starts from AttributeValueIndexed nodes to leverage the TEXT index.
+         # This approach is more efficient for search operations as it:
+         # 1. Starts from AttributeValueIndexed nodes (smaller set when filtered)
+         # 2. Traverses from matching values back to their owning nodes
+         # 3. Filters nodes by branch and status
+         query = """
+         // --------------------------
+         // start with all possible Node-Attribute-AttributeValue combinations
+         // --------------------------
+         MATCH (av:AttributeValueIndexed)<-[:HAS_VALUE]-(attr:Attribute)<-[:HAS_ATTRIBUTE]-(n)
+         WHERE %(search_predicate)s %(kind_filter)s
+         WITH DISTINCT n, attr, av
+         // --------------------------
+         // filter HAS_VALUE edges
+         // --------------------------
+         CALL (av, attr) {
+             MATCH (av)<-[r:HAS_VALUE]-(attr)
+             WHERE %(branch_filter)s
+             RETURN r
+             ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+             LIMIT 1
+         }
+         WITH n, attr, r
+         WHERE r.status = "active"
+         // --------------------------
+         // filter HAS_ATTRIBUTE edges
+         // --------------------------
+         CALL (n, attr) {
+             MATCH (attr)<-[r:HAS_ATTRIBUTE]-(n)
+             WHERE %(branch_filter)s
+             RETURN r
+             ORDER BY r.branch_level DESC, r.from DESC, r.status ASC
+             LIMIT 1
+         }
+         WITH n, attr, r
+         WHERE r.status = "active"
+         """ % {
+             "search_predicate": search_predicate,
+             "kind_filter": kind_filter,
+             "branch_filter": branch_filter,
+         }
+
+         self.add_to_query(query)
+
+     def get_data(self) -> Generator[NodeGetListByAttributeValueQueryResult, None, None]:
+         """Yield results as typed dataclass instances."""
+         for result in self.get_results():
+             yield NodeGetListByAttributeValueQueryResult(
+                 uuid=result.get_as_str("uuid"),
+                 kind=result.get_as_str("kind"),
+             )
+
+
  class NodeGetHierarchyQuery(Query):
      name = "node_get_hierarchy"
      type = QueryType.READ
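
The case-variation trick in the comments above can be illustrated outside of Cypher. A minimal Python sketch (not part of the package) of how the four query parameters are derived and assembled into the CONTAINS predicate:

    # Sketch: Neo4j TEXT indexes can serve CONTAINS/= predicates on the raw property,
    # but wrapping the property in toLower()/toString() makes the predicate
    # non-indexable, so the query matches four precomputed case variants instead.
    def case_variants(search_value: str) -> dict[str, str]:
        return {
            "search_original": search_value,
            "search_lower": search_value.lower(),
            "search_upper": search_value.upper(),
            "search_title": search_value.capitalize(),
        }

    params = case_variants("Leaf")
    predicate = "(" + " OR ".join(f"av.value CONTAINS ${name}" for name in params) + ")"

Note that this only approximates case-insensitive matching: a stored value such as "lEAF" would match none of the four variants. That is the trade-off accepted to keep the TEXT index usable.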
infrahub/core/query/resource_manager.py CHANGED
@@ -70,6 +70,21 @@ class NumberPoolAllocatedResult:
          )


+ @dataclass(frozen=True)
+ class NumberPoolFreeData:
+     value: int
+     is_free: bool
+     is_last: bool
+
+     @classmethod
+     def from_db(cls, result: QueryResult) -> NumberPoolFreeData:
+         return cls(
+             value=result.get_as_type("value", return_type=int),
+             is_free=result.get_as_type("is_free", return_type=bool),
+             is_last=result.get_as_type("is_last", return_type=bool),
+         )
+
+
  class IPAddressPoolGetIdentifiers(Query):
      name = "ipaddresspool_get_identifiers"
      type = QueryType.READ
@@ -393,7 +408,9 @@ class NumberPoolGetUsed(Query):
          n.uuid = res.identifier AND
          attr.name = $attribute_name AND
          all(r in [res, hv, ha] WHERE (%(branch_filter)s))
-     ORDER BY res.branch_level DESC, hv.branch_level DESC, ha.branch_level DESC, res.from DESC, hv.from DESC, ha.from DESC
+     ORDER BY res.branch_level DESC, hv.branch_level DESC, ha.branch_level DESC,
+         res.from DESC, hv.from DESC, ha.from DESC,
+         res.status ASC, hv.status ASC, ha.status ASC
      RETURN (res.status = "active" AND hv.status = "active" AND ha.status = "active") AS is_active
      LIMIT 1
  }
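
The added status ASC clauses make the edge selection deterministic when two edges share the same branch_level and from timestamp: "active" now sorts before "deleted". A small Python sketch (hypothetical edge dicts, not package code) of the same ordering, using two stable sorts for the mixed ASC/DESC directions:

    edges = [
        {"branch_level": 1, "from": "2024-01-01", "status": "deleted"},
        {"branch_level": 1, "from": "2024-01-01", "status": "active"},
    ]
    edges.sort(key=lambda e: e["status"])  # status ASC: "active" < "deleted" lexically
    edges.sort(key=lambda e: (e["branch_level"], e["from"]), reverse=True)  # primary keys DESC
    assert edges[0]["status"] == "active"  # ties now resolve to the active edge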
@@ -419,6 +436,102 @@ class NumberPoolGetUsed(Query):
          yield NumberPoolIdentifierData.from_db(result)


+ class NumberPoolGetFree(Query):
+     name = "number_pool_get_free"
+     type = QueryType.READ
+
+     def __init__(
+         self,
+         pool: CoreNumberPool,
+         min_value: int | None = None,
+         max_value: int | None = None,
+         **kwargs: dict[str, Any],
+     ) -> None:
+         self.pool = pool
+         self.min_value = min_value
+         self.max_value = max_value
+
+         super().__init__(**kwargs)  # type: ignore[arg-type]
+
+     async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+         self.params["pool_id"] = self.pool.get_id()
+         # Use min_value/max_value if provided, otherwise use the pool's start_range/end_range
+         self.params["start_range"] = self.min_value if self.min_value is not None else self.pool.start_range.value
+         self.params["end_range"] = self.max_value if self.max_value is not None else self.pool.end_range.value
+         self.limit = 1  # The query is only meant to return a single free entry
+
+         branch_filter, branch_params = self.branch.get_query_filter_path(
+             at=self.at.to_string(), branch_agnostic=self.branch_agnostic
+         )
+
+         self.params.update(branch_params)
+         self.params["attribute_name"] = self.pool.node_attribute.value
+
+         query = """
+         MATCH (pool:%(number_pool)s { uuid: $pool_id })-[res:IS_RESERVED]->(av:AttributeValueIndexed)
+         WHERE toInteger(av.value) >= $start_range and toInteger(av.value) <= $end_range
+         CALL (pool, res, av) {
+             MATCH (pool)-[res]->(av)<-[hv:HAS_VALUE]-(attr:Attribute)<-[ha:HAS_ATTRIBUTE]-(n:%(node)s)
+             WHERE
+                 n.uuid = res.identifier AND
+                 attr.name = $attribute_name AND
+                 all(r in [res, hv, ha] WHERE (%(branch_filter)s))
+             ORDER BY res.branch_level DESC, hv.branch_level DESC, ha.branch_level DESC,
+                 res.from DESC, hv.from DESC, ha.from DESC,
+                 res.status ASC, hv.status ASC, ha.status ASC
+             RETURN (res.status = "active" AND hv.status = "active" AND ha.status = "active") AS is_active
+             LIMIT 1
+         }
+         WITH av, res, is_active
+         WHERE is_active = True
+         WITH DISTINCT toInteger(av.value) AS used_value
+         ORDER BY used_value ASC
+         WITH [$start_range - 1] + collect(used_value) AS nums
+         UNWIND range(0, size(nums) - 1) AS idx
+         CALL (nums, idx) {
+             WITH nums[idx] AS curr, idx - 1 + $start_range AS expected
+             RETURN expected AS number, expected <> curr AS is_free, idx = size(nums) - 1 AS is_last
+         }
+         WITH number, is_free, is_last
+         WHERE is_free = true OR is_last = true
+         WITH number AS free_number, is_free, is_last
+         """ % {
+             "branch_filter": branch_filter,
+             "number_pool": InfrahubKind.NUMBERPOOL,
+             "node": self.pool.node.value,
+         }
+
+         self.add_to_query(query)
+         self.return_labels = ["free_number as value", "is_free", "is_last"]
+         self.order_by = ["value"]
+
+     def get_free_data(self) -> NumberPoolFreeData | None:
+         if not self.results:
+             return None
+
+         return NumberPoolFreeData.from_db(result=self.results[0])
+
+     def get_result_value(self) -> int | None:
+         """Get the free number from the query results, handling edge cases.
+
+         Returns:
+             The free number if found, or None if the pool is exhausted in the queried range.
+         """
+         result_data = self.get_free_data()
+         if result_data is None:
+             # No reservations in range - return start_range
+             if self.params["start_range"] <= self.params["end_range"]:
+                 return self.params["start_range"]
+             return None
+
+         if result_data.is_free:
+             return result_data.value
+         # is_last=True and is_free=False means all numbers up to value are used
+         if result_data.is_last and result_data.value < self.params["end_range"]:
+             return result_data.value + 1
+         return None
+
+
  class NumberPoolSetReserved(Query):
      name = "numberpool_set_reserved"
      type = QueryType.WRITE
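
The Cypher above finds the first unused number by prepending a sentinel (start_range - 1) to the sorted list of used values and scanning for the first index where the expected value diverges from the actual one. A minimal Python sketch (not from the package) of that scan combined with the edge-case handling in get_result_value:

    def first_free(used: list[int], start_range: int, end_range: int) -> int | None:
        # No reservations in range: the first number of the range is free.
        if not used:
            return start_range if start_range <= end_range else None
        nums = [start_range - 1] + sorted(set(used))  # sentinel + used values
        for idx, curr in enumerate(nums):
            expected = idx - 1 + start_range
            if expected != curr:  # first gap in the sequence ("is_free")
                return expected
        # No gap: everything up to the last used value is taken ("is_last"),
        # so try the next number if it is still inside the range.
        last = nums[-1]
        return last + 1 if last < end_range else None

    assert first_free([5, 6, 8], start_range=5, end_range=10) == 7  # gap inside the range
    assert first_free([], start_range=5, end_range=10) == 5         # empty pool
    assert first_free([5, 6, 7], start_range=5, end_range=7) is None  # exhausted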
infrahub/core/relationship/model.py CHANGED
@@ -652,7 +652,7 @@ class Relationship(FlagPropertyMixin, NodePropertyMixin, MetadataInterface):
      destination_id=peer.id,
      status="active",
      direction=self.schema.direction.value,
-     branch_level=self.branch.hierarchy_level,
+     branch_level=branch.hierarchy_level,
      branch_support=self.schema.branch.value if self.schema.branch else None,
      hierarchical=self.schema.hierarchical,
      is_protected=self.is_protected,
infrahub/core/schema/definitions/core/webhook.py CHANGED
@@ -87,7 +87,6 @@ core_webhook = GenericSchema(
      name="validate_certificates",
      kind="Boolean",
      description="Whether to validate SSL/TLS certificates",
-     default_value=True,
      optional=True,
      order_weight=5000,
  ),
infrahub/core/schema/definitions/internal.py CHANGED
@@ -182,6 +182,7 @@ class SchemaNode(BaseModel):
      display_label: str | None = None
      display_labels: list[str]
      uniqueness_constraints: list[list[str]] | None = None
+     human_friendly_id: list[str] | None = None

      def to_dict(self) -> dict[str, Any]:
          return {
@@ -199,6 +200,7 @@ class SchemaNode(BaseModel):
          "display_label": self.display_label,
          "display_labels": self.display_labels,
          "uniqueness_constraints": self.uniqueness_constraints,
+         "human_friendly_id": self.human_friendly_id,
      }

      def without_duplicates(self, other: SchemaNode) -> SchemaNode:
@@ -225,7 +227,6 @@ base_node_schema = SchemaNode(
      namespace="Schema",
      branch=BranchSupportType.AWARE.value,
      include_in_menu=False,
-     default_filter="name__value",
      display_labels=["label__value"],
      attributes=[
          SchemaAttribute(
@@ -239,7 +240,7 @@
      name="name",
      kind="Text",
      description="Node name, must be unique within a namespace and must start with an uppercase letter.",
-     unique=True,
+     unique=False,
      regex=str(NODE_NAME_REGEX),
      min_length=DEFAULT_NAME_MIN_LENGTH,
      max_length=DEFAULT_NAME_MAX_LENGTH,
@@ -394,8 +395,9 @@ node_schema = SchemaNode(
      namespace="Schema",
      branch=BranchSupportType.AWARE.value,
      include_in_menu=False,
-     default_filter="name__value",
      display_labels=["label__value"],
+     human_friendly_id=["namespace__value", "name__value"],
+     uniqueness_constraints=[["namespace__value", "name__value"]],
      attributes=base_node_schema.attributes
      + [
          SchemaAttribute(
@@ -802,7 +804,7 @@ relationship_schema = SchemaNode(
      name="branch",
      kind="Text",
      internal_kind=BranchSupportType,
-     description="Type of branch support for the relatioinship, if not defined it will be determine based both peers.",
+     description="Type of branch support for the relationship. If not defined, it will be determined based on both peers.",
      enum=BranchSupportType.available_types(),
      optional=True,
      extra={"update": UpdateSupport.NOT_SUPPORTED},  # https://github.com/opsmill/infrahub/issues/2476
@@ -898,8 +900,9 @@ generic_schema = SchemaNode(
      namespace="Schema",
      branch=BranchSupportType.AWARE.value,
      include_in_menu=False,
-     default_filter="name__value",
      display_labels=["label__value"],
+     human_friendly_id=["namespace__value", "name__value"],
+     uniqueness_constraints=[["namespace__value", "name__value"]],
      attributes=base_node_schema.attributes
      + [
          SchemaAttribute(
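
The net effect of these schema changes: node and generic schema names are no longer globally unique (unique=True was dropped from the name attribute); instead, the (namespace, name) pair is enforced through the new uniqueness_constraints and surfaced via human_friendly_id. A small illustration with hypothetical values:

    existing = {("Infra", "Device"), ("Builtin", "Tag")}  # (namespace, name) pairs in use

    def violates_uniqueness(namespace: str, name: str) -> bool:
        # mirrors uniqueness_constraints=[["namespace__value", "name__value"]]
        return (namespace, name) in existing

    assert not violates_uniqueness("Test", "Device")  # same name, different namespace: allowed
    assert violates_uniqueness("Infra", "Device")     # duplicate pair: rejected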
infrahub/core/validators/determiner.py CHANGED
@@ -85,6 +85,10 @@ class ConstraintValidatorDeterminer:
          constraints: list[SchemaUpdateConstraintInfo] = []
          schemas = list(self.schema_branch.get_all(duplicate=False).values())
          # added here to check their uniqueness constraints
+         with contextlib.suppress(SchemaNotFoundError):
+             schemas.append(self.schema_branch.get_node(name="SchemaNode", duplicate=False))
+         with contextlib.suppress(SchemaNotFoundError):
+             schemas.append(self.schema_branch.get_node(name="SchemaGeneric", duplicate=False))
          with contextlib.suppress(SchemaNotFoundError):
              schemas.append(self.schema_branch.get_node(name="SchemaAttribute", duplicate=False))
          with contextlib.suppress(SchemaNotFoundError):
infrahub/graphql/analyzer.py CHANGED
@@ -8,6 +8,7 @@ from functools import cached_property
  from typing import TYPE_CHECKING, Any

  from graphql import (
+     DocumentNode,
      FieldNode,
      FragmentDefinitionNode,
      FragmentSpreadNode,
@@ -389,6 +390,7 @@ class InfrahubGraphQLQueryAnalyzer(GraphQLQueryAnalyzer):
          schema: GraphQLSchema | None = None,
          query_variables: dict[str, Any] | None = None,
          operation_name: str | None = None,
+         document: DocumentNode | None = None,
      ) -> None:
          self.branch = branch
          self.schema_branch = schema_branch
@@ -396,7 +398,7 @@
          self.query_variables: dict[str, Any] = query_variables or {}
          self._named_fragments: dict[str, GraphQLQueryNode] = {}
          self._fragment_dependencies: dict[str, set[str]] = {}
-         super().__init__(query=query, schema=schema)
+         super().__init__(query=query, schema=schema, document=document)

      @property
      def operation_names(self) -> list[str]:
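
The new document parameter lets callers hand an already-parsed AST to the analyzer instead of having it re-parse the query string. The underlying parse-once idea, shown with plain graphql-core (a standalone sketch, not infrahub code):

    from graphql import (
        GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString,
        execute_sync, parse, validate,
    )

    schema = GraphQLSchema(
        query=GraphQLObjectType(
            "Query", {"hello": GraphQLField(GraphQLString, resolve=lambda obj, info: "world")}
        )
    )
    document = parse("{ hello }")            # parse once...
    assert not validate(schema, document)    # ...reuse the AST for validation...
    result = execute_sync(schema, document)  # ...and for execution, with no re-parse
    assert result.data == {"hello": "world"}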
infrahub/graphql/app.py CHANGED
@@ -22,10 +22,7 @@ from graphql import (
      ExecutionContext,
      ExecutionResult,
      GraphQLError,
-     GraphQLFormattedError,
      OperationType,
-     graphql,
-     parse,
      subscribe,
      validate,
  )
@@ -45,6 +42,7 @@ from infrahub.core.registry import registry
  from infrahub.core.timestamp import Timestamp
  from infrahub.exceptions import BranchNotFoundError, Error
  from infrahub.graphql.analyzer import InfrahubGraphQLQueryAnalyzer
+ from infrahub.graphql.execution import cached_parse, execute_graphql_query
  from infrahub.graphql.initialization import GraphqlParams, prepare_graphql_params
  from infrahub.log import get_logger

@@ -62,7 +60,7 @@ from .middleware import raise_on_mutation_on_branch_needing_rebase

  if TYPE_CHECKING:
      import graphene
-     from graphql import GraphQLSchema
+     from graphql import GraphQLFormattedError, GraphQLSchema
      from graphql.language.ast import (
          DocumentNode,
          OperationDefinitionNode,
@@ -213,6 +211,7 @@ class InfrahubGraphQLApp:
          schema=graphql_params.schema,
          operation_name=operation_name,
          branch=branch,
+         document=cached_parse(query),
      )

      # if the query contains some mutation, it's not currently supported to set AT manually
@@ -228,6 +227,7 @@ class InfrahubGraphQLApp:
          schema=graphql_params.schema,
          operation_name=operation_name,
          branch=branch,
+         document=cached_parse(query),
      )
      impacted_models = analyzed_query.query_report.impacted_models

@@ -252,7 +252,7 @@
          span.set_attributes(labels)

          with GRAPHQL_DURATION_METRICS.labels(**labels).time():
-             result = await graphql(
+             result = await execute_graphql_query(
                  schema=graphql_params.schema,
                  source=query,
                  context_value=graphql_params.context,
@@ -265,6 +265,7 @@

      response: dict[str, Any] = {"data": result.data}
      if result.errors:
+         GRAPHQL_QUERY_ERRORS_METRICS.labels(**labels).observe(len(result.errors))
          for error in result.errors:
              if error.original_error:
                  self._log_error(error=error.original_error)
@@ -283,10 +284,6 @@
      GRAPHQL_TOP_LEVEL_QUERIES_METRICS.labels(**labels).observe(analyzed_query.nbr_queries)
      GRAPHQL_QUERY_OBJECTS_METRICS.labels(**labels).observe(len(impacted_models))

-     _, errors = analyzed_query.is_valid
-     if errors:
-         GRAPHQL_QUERY_ERRORS_METRICS.labels(**labels).observe(len(errors))
-
      return json_response

  def _set_labels(self, request: Request, branch: Branch, query: InfrahubGraphQLQueryAnalyzer) -> dict[str, Any]:  # noqa: ARG002
@@ -391,7 +388,7 @@
      document: DocumentNode | None = None

      try:
-         document = parse(query)
+         document = cached_parse(query)
          operation = get_operation_ast(document, operation_name)
          errors = validate(graphql_params.schema, document)
      except GraphQLError as e:
infrahub/graphql/execution.py ADDED
@@ -0,0 +1,95 @@
+ from __future__ import annotations
+
+ from functools import lru_cache
+ from inspect import isawaitable
+ from typing import TYPE_CHECKING, Any, Callable
+
+ from graphql import GraphQLSchema, execute, parse, validate
+ from graphql.error import GraphQLError
+ from graphql.execution import ExecutionResult
+ from graphql.type import validate_schema
+
+ if TYPE_CHECKING:
+     from graphql import ExecutionContext, GraphQLFieldResolver, GraphQLTypeResolver
+     from graphql.execution import Middleware
+     from graphql.language import Source
+     from graphql.language.ast import DocumentNode
+
+
+ @lru_cache(maxsize=1024)
+ def _cached_parse(query: str) -> DocumentNode:
+     """Internal cached parse function for queries without the @expand directive."""
+     return parse(query)
+
+
+ def cached_parse(query: str | Source) -> DocumentNode:
+     """Parse a GraphQL query string into a DocumentNode.
+
+     Queries containing the @expand directive are not cached because the parser
+     mutates the AST to add expanded fields, which would corrupt the cache.
+     """
+     query_str = query if isinstance(query, str) else query.body
+     if "@expand" in query_str:
+         return parse(query)
+     return _cached_parse(query_str)
+
+
+ @lru_cache(maxsize=1024)
+ def cached_validate(schema: GraphQLSchema, document_ast: DocumentNode) -> list[GraphQLError]:
+     return validate(schema, document_ast)
+
+
+ @lru_cache(maxsize=1024)
+ def cached_validate_schema(schema: GraphQLSchema) -> list[GraphQLError]:
+     return validate_schema(schema)
+
+
+ async def execute_graphql_query(
+     schema: GraphQLSchema,
+     source: str | Source,
+     root_value: Any = None,
+     context_value: Any = None,
+     variable_values: dict[str, Any] | None = None,
+     operation_name: str | None = None,
+     field_resolver: GraphQLFieldResolver | None = None,
+     type_resolver: GraphQLTypeResolver | None = None,
+     middleware: Middleware | None = None,
+     execution_context_class: type[ExecutionContext] | None = None,
+     is_awaitable: Callable[[Any], bool] | None = None,
+ ) -> ExecutionResult:
+     """Execute a query, returning asynchronously only if necessary."""
+     # Validate schema
+     schema_validation_errors = cached_validate_schema(schema)
+     if schema_validation_errors:
+         return ExecutionResult(data=None, errors=schema_validation_errors)
+
+     # Parse
+     try:
+         document = cached_parse(source)
+     except GraphQLError as error:
+         return ExecutionResult(data=None, errors=[error])
+
+     # Validate the document against the schema
+     validation_errors = cached_validate(schema, document)
+     if validation_errors:
+         return ExecutionResult(data=None, errors=validation_errors)
+
+     # Execute
+     result = execute(
+         schema,
+         document,
+         root_value,
+         context_value,
+         variable_values,
+         operation_name,
+         field_resolver,
+         type_resolver,
+         None,
+         middleware,
+         execution_context_class,
+         is_awaitable,
+     )
+
+     if isawaitable(result):
+         return await result
+
+     return result
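
A short usage sketch of the new helpers (assuming infrahub-server 1.7.3 is importable). It shows the cache hit for ordinary queries and the deliberate cache bypass for @expand queries:

    from infrahub.graphql.execution import cached_parse

    doc1 = cached_parse("{ device { name { value } } }")
    doc2 = cached_parse("{ device { name { value } } }")
    assert doc1 is doc2       # identical string: served from the LRU cache

    exp1 = cached_parse("{ device @expand { name { value } } }")
    exp2 = cached_parse("{ device @expand { name { value } } }")
    assert exp1 is not exp2   # @expand queries are re-parsed on every call

Caching validation results alongside parsing is what lets execute_graphql_query skip both the parse and the validate steps for repeated queries against the same schema object.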
infrahub/graphql/mutations/proposed_change.py CHANGED
@@ -84,7 +84,22 @@ class InfrahubProposedChangeMutation(InfrahubMutationMixin, Mutation):
          if state and state != ProposedChangeState.OPEN.value:
              raise ValidationError(input_value="A proposed change has to be in the open state during creation")

+         source_branch_name = data.get("source_branch", {}).get("value")
+
          async with graphql_context.db.start_transaction() as dbt:
+             existing_open_pcs = await NodeManager.query(
+                 db=dbt,
+                 schema=InfrahubKind.PROPOSEDCHANGE,
+                 filters={
+                     "source_branch__value": source_branch_name,
+                     "state__value": ProposedChangeState.OPEN.value,
+                 },
+             )
+             if existing_open_pcs:
+                 raise ValidationError(
+                     input_value=f"An open proposed change already exists for branch '{source_branch_name}'"
+                 )
+
              proposed_change, result = await super().mutate_create(
                  info=info, data=data, branch=branch, database=dbt, override_data=override_data
              )
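
With this change, creating a proposed change first checks, inside the same transaction as the create itself, whether an open proposed change already exists for the source branch, so the lookup and the insert commit together. A trivial in-memory sketch (hypothetical data, not package code) of the guard's behavior:

    open_proposed_changes = {"feature-a"}  # source branches with an open proposed change

    def validate_create(source_branch: str) -> None:
        if source_branch in open_proposed_changes:
            raise ValueError(f"An open proposed change already exists for branch '{source_branch}'")

    validate_create("feature-b")  # allowed: no open proposed change for this branch
    try:
        validate_create("feature-a")
    except ValueError as exc:
        print(exc)                # rejected: duplicate open proposed change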
infrahub/graphql/parser.py CHANGED
@@ -99,12 +99,9 @@ class GraphQLExtractor:
      key=attribute.name,
      node=FieldNode(
          kind="field",
-         name=NameNode(
-             kind="name",
-             value=key,
-             directives=[],
-             arguments=[],
-         ),
+         name=NameNode(kind="name", value=key),
+         directives=[],
+         arguments=[],
      ),
      path=attribute_path,
      fields={key: None},
@@ -117,7 +114,9 @@
  FieldNode(
      kind="field",
      name=NameNode(kind="name", value=attribute.name),
-     selection_set=SelectionSetNode(selections=tuple(enrichers)),
+     selection_set=SelectionSetNode(selections=tuple(e.node for e in enrichers)),
+     directives=[],
+     arguments=[],
  )
  )
@@ -130,6 +129,8 @@
      kind="field",
      name=NameNode(kind="name", value="node"),
      selection_set=SelectionSetNode(selections=tuple(attribute_enrichers)),
+     directives=[],
+     arguments=[],
  ),
  fields={attribute.name: field_attributes for attribute in self.schema.attributes},
  )
@@ -166,6 +167,8 @@
      kind="field",
      name=NameNode(kind="name", value=sub_node.key),
      selection_set=SelectionSetNode(selections=(sub_node.node,)),
+     directives=[],
+     arguments=[],
  )
  )
  selection_set.selections = tuple(selections)
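
The key fix in this file is the second hunk: the selection set was previously built from the enricher wrapper objects rather than the FieldNode each wrapper carries. A standalone sketch of the difference (hypothetical Enricher stand-in; the AST types are real graphql-core classes):

    from dataclasses import dataclass
    from graphql.language.ast import FieldNode, NameNode, SelectionSetNode

    @dataclass
    class Enricher:  # stand-in for the parser's internal wrapper type
        key: str
        node: FieldNode

    enrichers = [
        Enricher(key="value", node=FieldNode(name=NameNode(value="value"), arguments=(), directives=()))
    ]

    # before: selections=tuple(enrichers) placed wrapper objects into the AST
    # after: unwrap to the FieldNode that each enricher carries
    selection_set = SelectionSetNode(selections=tuple(e.node for e in enrichers))
    assert all(isinstance(s, FieldNode) for s in selection_set.selections)

The other hunks are consistency fixes of the same kind: directives and arguments belong on the FieldNode, not on its NameNode, and are now always initialized to empty lists.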