infrahub-server 1.2.7__py3-none-any.whl → 1.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. infrahub/api/transformation.py +1 -0
  2. infrahub/artifacts/models.py +4 -0
  3. infrahub/cli/db.py +15 -6
  4. infrahub/computed_attribute/tasks.py +34 -12
  5. infrahub/config.py +2 -1
  6. infrahub/constants/__init__.py +0 -0
  7. infrahub/core/branch/tasks.py +0 -2
  8. infrahub/core/constants/__init__.py +1 -0
  9. infrahub/core/diff/calculator.py +4 -3
  10. infrahub/core/diff/combiner.py +1 -2
  11. infrahub/core/diff/coordinator.py +44 -28
  12. infrahub/core/diff/data_check_synchronizer.py +3 -2
  13. infrahub/core/diff/enricher/hierarchy.py +38 -27
  14. infrahub/core/diff/ipam_diff_parser.py +5 -4
  15. infrahub/core/diff/merger/merger.py +20 -18
  16. infrahub/core/diff/model/field_specifiers_map.py +64 -0
  17. infrahub/core/diff/model/path.py +55 -58
  18. infrahub/core/diff/parent_node_adder.py +14 -16
  19. infrahub/core/diff/query/drop_nodes.py +42 -0
  20. infrahub/core/diff/query/field_specifiers.py +8 -7
  21. infrahub/core/diff/query/filters.py +15 -1
  22. infrahub/core/diff/query/save.py +3 -0
  23. infrahub/core/diff/query_parser.py +49 -52
  24. infrahub/core/diff/repository/deserializer.py +36 -23
  25. infrahub/core/diff/repository/repository.py +31 -12
  26. infrahub/core/graph/__init__.py +1 -1
  27. infrahub/core/graph/index.py +3 -1
  28. infrahub/core/initialization.py +23 -7
  29. infrahub/core/manager.py +16 -5
  30. infrahub/core/migrations/graph/__init__.py +2 -0
  31. infrahub/core/migrations/graph/m014_remove_index_attr_value.py +9 -8
  32. infrahub/core/migrations/graph/m027_delete_isolated_nodes.py +50 -0
  33. infrahub/core/protocols.py +1 -0
  34. infrahub/core/query/branch.py +27 -17
  35. infrahub/core/query/diff.py +65 -38
  36. infrahub/core/query/node.py +111 -33
  37. infrahub/core/query/relationship.py +17 -3
  38. infrahub/core/query/subquery.py +2 -2
  39. infrahub/core/schema/definitions/core/builtin.py +2 -4
  40. infrahub/core/schema/definitions/core/transform.py +1 -0
  41. infrahub/core/schema/schema_branch.py +3 -0
  42. infrahub/core/validators/aggregated_checker.py +2 -2
  43. infrahub/core/validators/uniqueness/query.py +30 -9
  44. infrahub/database/__init__.py +1 -16
  45. infrahub/database/index.py +1 -1
  46. infrahub/database/memgraph.py +1 -12
  47. infrahub/database/neo4j.py +1 -13
  48. infrahub/git/integrator.py +27 -3
  49. infrahub/git/models.py +4 -0
  50. infrahub/git/tasks.py +3 -0
  51. infrahub/git_credential/helper.py +2 -2
  52. infrahub/graphql/mutations/computed_attribute.py +5 -1
  53. infrahub/graphql/queries/diff/tree.py +2 -1
  54. infrahub/message_bus/operations/requests/proposed_change.py +6 -0
  55. infrahub/message_bus/types.py +3 -0
  56. infrahub/patch/queries/consolidate_duplicated_nodes.py +109 -0
  57. infrahub/patch/queries/delete_duplicated_edges.py +138 -0
  58. infrahub/proposed_change/tasks.py +1 -0
  59. infrahub/server.py +1 -3
  60. infrahub/transformations/models.py +3 -0
  61. infrahub/transformations/tasks.py +1 -0
  62. infrahub/trigger/models.py +11 -1
  63. infrahub/trigger/setup.py +38 -13
  64. infrahub/trigger/tasks.py +1 -4
  65. infrahub/webhook/models.py +3 -0
  66. infrahub/workflows/initialization.py +1 -3
  67. infrahub_sdk/client.py +4 -4
  68. infrahub_sdk/config.py +17 -0
  69. infrahub_sdk/ctl/cli_commands.py +7 -1
  70. infrahub_sdk/ctl/generator.py +2 -2
  71. infrahub_sdk/generator.py +12 -66
  72. infrahub_sdk/operation.py +80 -0
  73. infrahub_sdk/protocols.py +12 -0
  74. infrahub_sdk/recorder.py +3 -0
  75. infrahub_sdk/schema/repository.py +4 -0
  76. infrahub_sdk/transforms.py +15 -27
  77. {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/METADATA +2 -2
  78. {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/RECORD +84 -78
  79. infrahub_testcontainers/container.py +1 -0
  80. infrahub_testcontainers/docker-compose.test.yml +5 -1
  81. infrahub/database/manager.py +0 -15
  82. /infrahub/{database/constants.py → constants/database.py} +0 -0
  83. {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/LICENSE.txt +0 -0
  84. {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/WHEEL +0 -0
  85. {infrahub_server-1.2.7.dist-info → infrahub_server-1.2.9.dist-info}/entry_points.txt +0 -0
infrahub/core/query/subquery.py CHANGED
@@ -57,7 +57,7 @@ async def build_subquery_filter(
     params.update(field_params)
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
@@ -138,7 +138,7 @@ async def build_subquery_order(
     field_filter[-1].name = "last"
 
     field_where.append("all(r IN relationships(path) WHERE (%s))" % branch_filter)
-    filter_str = f"({node_alias})" + "".join([str(item) for item in field_filter])
+    filter_str = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join([str(item) for item in field_filter])
     where_str = " AND ".join(field_where)
     branch_level_str = "reduce(br_lvl = 0, r in relationships(path) | br_lvl + r.branch_level)"
     froms_str = db.render_list_comprehension(items="relationships(path)", item_name="from")
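Both hunks make the same one-line change: the subquery's start pattern re-binds the already-matched node alias with its Node label and uuid property, which appears intended to give the Cypher planner a label and an indexable property to anchor the expansion on. A minimal sketch of the string being built (the field_filter value here is a hypothetical stand-in):

    node_alias = "n"
    field_filter = ["-[:HAS_ATTRIBUTE]->(i:Attribute)"]  # hypothetical filter chain

    old = f"({node_alias})" + "".join(str(item) for item in field_filter)
    new = f"({node_alias}:Node {{uuid: {node_alias}.uuid}})" + "".join(str(item) for item in field_filter)

    print(old)  # (n)-[:HAS_ATTRIBUTE]->(i:Attribute)
    print(new)  # (n:Node {uuid: n.uuid})-[:HAS_ATTRIBUTE]->(i:Attribute)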
infrahub/core/schema/definitions/core/builtin.py CHANGED
@@ -1,6 +1,4 @@
-from infrahub.core.constants import (
-    BranchSupportType,
-)
+from infrahub.core.constants import BranchSupportType
 
 from ...attribute_schema import AttributeSchema as Attr
 from ...node_schema import NodeSchema
@@ -8,7 +6,7 @@ from ...node_schema import NodeSchema
 builtin_tag = NodeSchema(
     name="Tag",
     namespace="Builtin",
-    description="Standard Tag object to attached to other objects to provide some context.",
+    description="Standard Tag object to attach to other objects to provide some context.",
     include_in_menu=True,
     icon="mdi:tag-multiple",
     label="Tag",
infrahub/core/schema/definitions/core/transform.py CHANGED
@@ -92,5 +92,6 @@ core_transform_python = NodeSchema(
     attributes=[
         Attr(name="file_path", kind="Text"),
        Attr(name="class_name", kind="Text"),
+        Attr(name="convert_query_response", kind="Boolean", optional=True, default_value=False),
     ],
 )
infrahub/core/schema/schema_branch.py CHANGED
@@ -2050,6 +2050,9 @@ class SchemaBranch:
 
         identified.add(node_schema)
 
+        if node_schema.is_node_schema:
+            identified.update([self.get(name=kind, duplicate=False) for kind in node_schema.inherit_from])
+
         for relationship in node_schema.relationships:
             if (
                 relationship.peer in [InfrahubKind.GENERICGROUP, InfrahubKind.PROFILE]
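The new block pulls a node schema's inherited generics into the identified set, not just the node itself. A toy illustration (not Infrahub's schema classes) of the added step:

    class ToySchema:
        def __init__(self, kind, inherit_from=None):
            self.kind = kind
            self.inherit_from = inherit_from or []
            self.is_node_schema = True

    node_schema = ToySchema("InfraDevice", inherit_from=["CoreArtifactTarget", "InfraGenericDevice"])  # hypothetical kinds

    identified = {node_schema.kind}
    if node_schema.is_node_schema:
        identified.update(node_schema.inherit_from)  # the real code resolves each kind via self.get()

    print(identified)  # {'InfraDevice', 'CoreArtifactTarget', 'InfraGenericDevice'} (set order varies)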
infrahub/core/validators/aggregated_checker.py CHANGED
@@ -97,9 +97,9 @@ class AggregatedConstraintChecker:
                        error_detail_str += f"={data_path.value!r}"
                    error_detail_str_list.append(error_detail_str)
                if data_path.peer_id:
-                    error_detail_str += f"{data_path.field_name}.id={data_path.peer_id}"
+                    error_detail_str = f"{data_path.field_name}.id={data_path.peer_id}"
                    error_detail_str_list.append(error_detail_str)
-            if error_detail_str:
+            if error_detail_str_list:
                error_str += " The error relates to field "
                error_str += ",".join(error_detail_str_list)
                error_str += "."
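Two fixes here: the peer detail now starts a fresh string (=) instead of concatenating onto the previous field's detail (+=), and the guard checks the collected list rather than the last string built. A small sketch of the corrected behaviour with hypothetical values:

    error_detail_str_list = []
    field_name, peer_id = "interfaces", "17b8d4b0"  # hypothetical

    error_detail_str = f"{field_name}.id={peer_id}"  # was `+=`, which prepended stale text
    error_detail_str_list.append(error_detail_str)

    if error_detail_str_list:  # was `if error_detail_str:`, which keyed off the last string only
        print(" The error relates to field " + ",".join(error_detail_str_list) + ".")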
infrahub/core/validators/uniqueness/query.py CHANGED
@@ -30,7 +30,7 @@ class NodeUniqueAttributeConstraintQuery(Query):
     def get_context(self) -> dict[str, str]:
         return {"kind": self.query_request.kind}
 
-    async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:  # noqa: ARG002
+    async def query_init(self, db: InfrahubDatabase, **kwargs: Any) -> None:  # noqa: ARG002,PLR0915
         branch_filter, branch_params = self.branch.get_query_filter_path(at=self.at.to_string(), is_isolated=False)
         self.params.update(branch_params)
         from_times = db.render_list_comprehension(items="relationships(potential_path)", item_name="from")
@@ -39,7 +39,7 @@
         )
 
         attribute_names = set()
-        attr_paths, attr_paths_with_value = [], []
+        attr_paths, attr_paths_with_value, attr_values = [], [], []
         for attr_path in self.query_request.unique_attribute_paths:
             try:
                 property_rel_name = self.attribute_property_map[attr_path.property_name or "value"]
@@ -50,6+50,7 @@
             attribute_names.add(attr_path.attribute_name)
             if attr_path.value:
                 attr_paths_with_value.append((attr_path.attribute_name, property_rel_name, attr_path.value))
+                attr_values.append(attr_path.value)
             else:
                 attr_paths.append((attr_path.attribute_name, property_rel_name))
 
@@ -57,6 +58,7 @@
         relationship_attr_paths = []
         relationship_only_attr_paths = []
         relationship_only_attr_values = []
+        relationship_attr_values = []
         relationship_attr_paths_with_value = []
         for rel_path in self.query_request.relationship_attribute_paths:
             relationship_names.add(rel_path.identifier)
@@ -64,6 +66,7 @@
                 relationship_attr_paths_with_value.append(
                     (rel_path.identifier, rel_path.attribute_name, rel_path.value)
                 )
+                relationship_attr_values.append(rel_path.value)
             elif rel_path.attribute_name:
                 relationship_attr_paths.append((rel_path.identifier, rel_path.attribute_name))
             else:
@@ -87,12 +90,14 @@
                 "node_kind": self.query_request.kind,
                 "attr_paths": attr_paths,
                 "attr_paths_with_value": attr_paths_with_value,
+                "attr_values": attr_values,
                 "attribute_names": list(attribute_names),
                 "relationship_names": list(relationship_names),
                 "relationship_attr_paths": relationship_attr_paths,
                 "relationship_attr_paths_with_value": relationship_attr_paths_with_value,
                 "relationship_only_attr_paths": relationship_only_attr_paths,
                 "relationship_only_attr_values": relationship_only_attr_values,
+                "relationship_attr_values": relationship_attr_values,
                 "min_count_required": self.min_count_required,
             }
         )
@@ -100,16 +105,28 @@
         attr_paths_subquery = """
        MATCH attr_path = (start_node:%(node_kind)s)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
        WHERE attr.name in $attribute_names
-        AND ([attr.name, type(r)] in $attr_paths
-        OR [attr.name, type(r), attr_value.value] in $attr_paths_with_value)
+        AND [attr.name, type(r)] in $attr_paths
        RETURN start_node, attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
        """ % {"node_kind": self.query_request.kind}
 
-        relationship_attr_paths_with_value_subquery = """
+        attr_paths_with_value_subquery = """
+        MATCH attr_path = (start_node:%(node_kind)s)-[:HAS_ATTRIBUTE]->(attr:Attribute)-[r:HAS_VALUE]->(attr_value:AttributeValue)
+        WHERE attr.name in $attribute_names AND attr_value.value in $attr_values
+        AND [attr.name, type(r), attr_value.value] in $attr_paths_with_value
+        RETURN start_node, attr_path as potential_path, NULL as rel_identifier, attr.name as potential_attr, attr_value.value as potential_attr_value
+        """ % {"node_kind": self.query_request.kind}
+
+        relationship_attr_paths_subquery = """
        MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
        WHERE relationship_node.name in $relationship_names
-        AND ([relationship_node.name, rel_attr.name] in $relationship_attr_paths
-        OR [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value)
+        AND [relationship_node.name, rel_attr.name] in $relationship_attr_paths
+        RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
+        """ % {"node_kind": self.query_request.kind}
+
+        relationship_attr_paths_with_value_subquery = """
+        MATCH rel_path = (start_node:%(node_kind)s)-[:IS_RELATED]-(relationship_node:Relationship)-[:IS_RELATED]-(related_n:Node)-[:HAS_ATTRIBUTE]->(rel_attr:Attribute)-[:HAS_VALUE]->(rel_attr_value:AttributeValue)
+        WHERE relationship_node.name in $relationship_names AND rel_attr_value.value in $relationship_attr_values
+        AND [relationship_node.name, rel_attr.name, rel_attr_value.value] in $relationship_attr_paths_with_value
        RETURN start_node, rel_path as potential_path, relationship_node.name as rel_identifier, rel_attr.name as potential_attr, rel_attr_value.value as potential_attr_value
        """ % {"node_kind": self.query_request.kind}
 
@@ -125,9 +142,13 @@
         }
 
         select_subqueries = []
-        if attr_paths or attr_paths_with_value:
+        if attr_paths:
             select_subqueries.append(attr_paths_subquery)
-        if relationship_attr_paths_with_value or relationship_attr_paths:
+        if attr_paths_with_value:
+            select_subqueries.append(attr_paths_with_value_subquery)
+        if relationship_attr_paths:
+            select_subqueries.append(relationship_attr_paths_subquery)
+        if relationship_attr_paths_with_value:
             select_subqueries.append(relationship_attr_paths_with_value_subquery)
         if relationship_only_attr_paths:
             select_subqueries.append(relationship_only_attr_paths_subquery)
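The two OR-combined subqueries are split into four, each gated on its own non-empty input, and the value-bearing variants gain a cheap IN $attr_values / IN $relationship_attr_values prefilter before the more expensive list-of-lists membership test. A reduced sketch of the new selection logic:

    def select_subqueries(attr_paths, attr_paths_with_value, rel_attr_paths, rel_attr_paths_with_value):
        selected = []
        if attr_paths:
            selected.append("attr_paths_subquery")
        if attr_paths_with_value:
            selected.append("attr_paths_with_value_subquery")
        if rel_attr_paths:
            selected.append("relationship_attr_paths_subquery")
        if rel_attr_paths_with_value:
            selected.append("relationship_attr_paths_with_value_subquery")
        return selected

    print(select_subqueries([("name", "HAS_VALUE")], [], [], []))  # ['attr_paths_subquery']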
infrahub/database/__init__.py CHANGED
@@ -26,16 +26,14 @@ from opentelemetry import trace
 from typing_extensions import Self
 
 from infrahub import config, lock
+from infrahub.constants.database import DatabaseType, Neo4jRuntime
 from infrahub.core import registry
 from infrahub.core.query import QueryType
 from infrahub.exceptions import DatabaseError
 from infrahub.log import get_logger
 from infrahub.utils import InfrahubStringEnum
 
-from .constants import DatabaseType, Neo4jRuntime
-from .memgraph import DatabaseManagerMemgraph
 from .metrics import CONNECTION_POOL_USAGE, QUERY_EXECUTION_METRICS, TRANSACTION_RETRIES
-from .neo4j import DatabaseManagerNeo4j
 
 if TYPE_CHECKING:
     from types import TracebackType
@@ -44,8 +42,6 @@ if TYPE_CHECKING:
     from infrahub.core.schema import MainSchemaTypes, NodeSchema
     from infrahub.core.schema.schema_branch import SchemaBranch
 
-    from .manager import DatabaseManager
-
 validated_database = {}
 R = TypeVar("R")
 
@@ -134,7 +130,6 @@ class InfrahubDatabase:
         mode: InfrahubDatabaseMode = InfrahubDatabaseMode.DRIVER,
         db_type: DatabaseType | None = None,
         default_neo4j_runtime: Neo4jRuntime = Neo4jRuntime.DEFAULT,
-        db_manager: DatabaseManager | None = None,
         schemas: list[SchemaBranch] | None = None,
         session: AsyncSession | None = None,
         session_mode: InfrahubDatabaseSessionMode = InfrahubDatabaseSessionMode.WRITE,
@@ -161,14 +156,6 @@
         else:
             self.db_type = config.SETTINGS.database.db_type
 
-        if db_manager:
-            self.manager = db_manager
-            self.manager.db = self
-        elif self.db_type == DatabaseType.NEO4J:
-            self.manager = DatabaseManagerNeo4j(db=self)
-        elif self.db_type == DatabaseType.MEMGRAPH:
-            self.manager = DatabaseManagerMemgraph(db=self)
-
     def __del__(self) -> None:
         if not self._session or not self._is_session_local or self._session.closed():
             return
@@ -228,7 +215,6 @@
             db_type=self.db_type,
             default_neo4j_runtime=self.default_neo4j_runtime,
             schemas=schemas or self._schemas.values(),
-            db_manager=self.manager,
             driver=self._driver,
             session_mode=session_mode,
             queries_names_to_config=self.queries_names_to_config,
@@ -243,7 +229,6 @@
             db_type=self.db_type,
             default_neo4j_runtime=self.default_neo4j_runtime,
             schemas=schemas or self._schemas.values(),
-            db_manager=self.manager,
             driver=self._driver,
             session=self._session,
             session_mode=self._session_mode,
infrahub/database/index.py CHANGED
@@ -5,7 +5,7 @@ from typing import TYPE_CHECKING
 
 from pydantic import BaseModel
 
-from .constants import EntityType, IndexType  # noqa: TC001
+from infrahub.constants.database import EntityType, IndexType  # noqa: TC001
 
 if TYPE_CHECKING:
     from infrahub.database import InfrahubDatabase
infrahub/database/memgraph.py CHANGED
@@ -1,13 +1,8 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
+from infrahub.constants.database import EntityType, IndexType
 
-from .constants import EntityType, IndexType
 from .index import IndexInfo, IndexItem, IndexManagerBase
-from .manager import DatabaseManager
-
-if TYPE_CHECKING:
-    from . import InfrahubDatabase
 
 
 class IndexNodeMemgraph(IndexItem):
@@ -51,9 +46,3 @@ class IndexManagerMemgraph(IndexManagerBase):
         )
 
         return results
-
-
-class DatabaseManagerMemgraph(DatabaseManager):
-    def __init__(self, db: InfrahubDatabase) -> None:
-        super().__init__(db=db)
-        self.index = IndexManagerMemgraph(db=db)
infrahub/database/neo4j.py CHANGED
@@ -1,15 +1,9 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING
-
+from infrahub.constants.database import EntityType, IndexType
 from infrahub.core.query import QueryType
 
-from .constants import EntityType, IndexType
 from .index import IndexInfo, IndexItem, IndexManagerBase
-from .manager import DatabaseManager
-
-if TYPE_CHECKING:
-    from . import InfrahubDatabase
 
 
 class IndexRelNeo4j(IndexItem):
@@ -68,9 +62,3 @@ class IndexManagerNeo4j(IndexManagerBase):
         )
 
         return results
-
-
-class DatabaseManagerNeo4j(DatabaseManager):
-    def __init__(self, db: InfrahubDatabase) -> None:
-        super().__init__(db=db)
-        self.index = IndexManagerNeo4j(db=db)
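Across these four files the database constants move from infrahub.database.constants to infrahub.constants.database (see file 82 in the list above), and the thin DatabaseManagerNeo4j / DatabaseManagerMemgraph wrappers, plus the manager attribute on InfrahubDatabase, are dropped. The enums themselves are unchanged; only the import path differs. A minimal sketch of the new import style, assuming infrahub is installed:

    from infrahub.constants.database import DatabaseType, EntityType, IndexType, Neo4jRuntime

    # The members referenced in the diff still exist after the move:
    assert DatabaseType.NEO4J and DatabaseType.MEMGRAPH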
infrahub/git/integrator.py CHANGED
@@ -10,6 +10,7 @@ import ujson
 import yaml
 from infrahub_sdk import InfrahubClient  # noqa: TC002
 from infrahub_sdk.exceptions import ValidationError
+from infrahub_sdk.node import InfrahubNode
 from infrahub_sdk.protocols import (
     CoreArtifact,
     CoreArtifactDefinition,
@@ -53,7 +54,6 @@ if TYPE_CHECKING:
     import types
 
     from infrahub_sdk.checks import InfrahubCheck
-    from infrahub_sdk.node import InfrahubNode
     from infrahub_sdk.schema.repository import InfrahubRepositoryArtifactDefinitionConfig
     from infrahub_sdk.transforms import InfrahubTransform
 
@@ -123,6 +123,10 @@ class TransformPythonInformation(BaseModel):
     timeout: int
     """Timeout for the function."""
 
+    convert_query_response: bool = Field(
+        ..., description="Indicate if the transform should convert the query response to InfrahubNode objects"
+    )
+
 
 class InfrahubRepositoryIntegrator(InfrahubRepositoryBase):
     """
@@ -874,6 +878,7 @@
                     file_path=file_path,
                     query=str(graphql_query.id),
                     timeout=transform_class.timeout,
+                    convert_query_response=transform.convert_query_response,
                 )
             )
 
@@ -1005,6 +1010,7 @@
                 "file_path": transform.file_path,
                 "class_name": transform.class_name,
                 "timeout": transform.timeout,
+                "convert_query_response": transform.convert_query_response,
             }
             create_payload = self.sdk.schema.generate_payload_create(
                 schema=schema,
@@ -1028,6 +1034,9 @@
         if existing_transform.timeout.value != local_transform.timeout:
             existing_transform.timeout.value = local_transform.timeout
 
+        if existing_transform.convert_query_response.value != local_transform.convert_query_response:
+            existing_transform.convert_query_response.value = local_transform.convert_query_response
+
         await existing_transform.save()
 
     @classmethod
@@ -1038,6 +1047,7 @@
             existing_transform.query.id != local_transform.query
             or existing_transform.file_path.value != local_transform.file_path
             or existing_transform.timeout.value != local_transform.timeout
+            or existing_transform.convert_query_response.value != local_transform.convert_query_response
         ):
             return False
         return True
@@ -1129,7 +1139,13 @@
 
     @task(name="python-transform-execute", task_run_name="Execute Python Transform", cache_policy=NONE)  # type: ignore[arg-type]
     async def execute_python_transform(
-        self, branch_name: str, commit: str, location: str, client: InfrahubClient, data: dict | None = None
+        self,
+        branch_name: str,
+        commit: str,
+        location: str,
+        client: InfrahubClient,
+        convert_query_response: bool,
+        data: dict | None = None,
     ) -> Any:
         """Execute A Python Transform stored in the repository."""
         log = get_run_logger()
@@ -1159,7 +1175,13 @@
 
             transform_class: type[InfrahubTransform] = getattr(module, class_name)
 
-            transform = transform_class(root_directory=commit_worktree.directory, branch=branch_name, client=client)
+            transform = transform_class(
+                root_directory=commit_worktree.directory,
+                branch=branch_name,
+                client=client,
+                convert_query_response=convert_query_response,
+                infrahub_node=InfrahubNode,
+            )
             return await transform.run(data=data)
 
         except ModuleNotFoundError as exc:
@@ -1216,6 +1238,7 @@
             location=transformation_location,
             data=response,
             client=self.sdk,
+            convert_query_response=transformation.convert_query_response.value,
         )  # type: ignore[misc]
 
         if definition.content_type.value == ContentType.APPLICATION_JSON.value and isinstance(artifact_content, dict):
@@ -1275,6 +1298,7 @@
             location=message.transform_location,
             data=response,
             client=self.sdk,
+            convert_query_response=message.convert_query_response,
         )  # type: ignore[misc]
 
         if message.content_type == ContentType.APPLICATION_JSON.value and isinstance(artifact_content, dict):
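These hunks thread the new convert_query_response flag from the stored transform definition down to the transform constructor, so the SDK can hand the GraphQL response to the transform as InfrahubNode objects instead of raw dicts. A hedged sketch of a user transform that would benefit (the class, query name, and data shape are hypothetical):

    from infrahub_sdk.transforms import InfrahubTransform

    class DeviceConfig(InfrahubTransform):  # hypothetical repository transform
        query = "device_config_query"  # hypothetical GraphQL query name

        async def transform(self, data):
            # With convert_query_response enabled the SDK would pre-convert
            # `data`; with the default (False) it stays a plain response dict.
            return data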
infrahub/git/models.py CHANGED
@@ -29,6 +29,10 @@ class RequestArtifactGenerate(BaseModel):
     repository_name: str = Field(..., description="The name of the Repository")
     repository_kind: str = Field(..., description="The kind of the Repository")
     branch_name: str = Field(..., description="The branch where the check is run")
+    convert_query_response: bool = Field(
+        default=False,
+        description="Indicate if the query response should be converted to InfrahubNode objects for Python transforms",
+    )
     target_id: str = Field(..., description="The ID of the target object for this artifact")
     target_kind: str = Field(..., description="The kind of the target object for this artifact")
     target_name: str = Field(..., description="Name of the artifact target")
infrahub/git/tasks.py CHANGED
@@ -339,10 +339,12 @@ async def generate_request_artifact_definition(
     )
     transform_location = ""
 
+    convert_query_response = False
     if transform.typename == InfrahubKind.TRANSFORMJINJA2:
         transform_location = transform.template_path.value
     elif transform.typename == InfrahubKind.TRANSFORMPYTHON:
         transform_location = f"{transform.file_path.value}::{transform.class_name.value}"
+        convert_query_response = transform.convert_query_response.value
 
     for relationship in group.members.peers:
         member = relationship.peer
@@ -368,6 +370,7 @@
             target_name=member.display_label,
             target_kind=member.get_kind(),
             timeout=transform.timeout.value,
+            convert_query_response=convert_query_response,
             context=context,
         )
 
infrahub/git_credential/helper.py CHANGED
@@ -3,9 +3,9 @@ import sys
 
 import typer
 from infrahub_sdk import Config, InfrahubClientSync
+from infrahub_sdk.protocols import CoreGenericRepository
 
 from infrahub import config
-from infrahub.core.constants import InfrahubKind
 
 logging.getLogger("httpx").setLevel(logging.ERROR)
 app = typer.Typer()
@@ -51,7 +51,7 @@ def get(
         raise typer.Exit(1) from exc
 
     client = InfrahubClientSync(config=Config(address=config.SETTINGS.main.internal_address, insert_tracker=True))
-    repo = client.get(kind=InfrahubKind.GENERICREPOSITORY, location__value=location)
+    repo = client.get(kind=CoreGenericRepository.__name__, location__value=location)
 
     if not repo:
         print("Repository not found in the database.")
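The helper now derives the kind from the SDK protocol class instead of the server-side InfrahubKind constant, which removes the infrahub.core dependency from this thin CLI. The substitution is literal:

    from infrahub_sdk.protocols import CoreGenericRepository

    print(CoreGenericRepository.__name__)  # "CoreGenericRepository", the kind passed to client.get()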
infrahub/graphql/mutations/computed_attribute.py CHANGED
@@ -71,7 +71,11 @@ class UpdateComputedAttribute(Mutation):
 
        if not (
            target_node := await NodeManager.get_one(
-                db=graphql_context.db, kind=node_schema.kind, id=str(data.id), branch=graphql_context.branch
+                db=graphql_context.db,
+                kind=node_schema.kind,
+                id=str(data.id),
+                branch=graphql_context.branch,
+                fields={target_attribute.name: None},
            )
        ):
            raise NodeNotFoundError(
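Passing fields={target_attribute.name: None} narrows the fetch to the one computed attribute being updated instead of hydrating the whole node. A toy illustration of the fields-mapping convention (toy loader, not Infrahub's NodeManager):

    def apply_field_selection(node: dict, fields: dict | None) -> dict:
        if fields is None:  # no selection -> return everything
            return node
        return {name: node[name] for name in fields if name in node}

    node = {"name": "sw-01", "description": "spine", "fqdn": "sw-01.lab"}
    print(apply_field_selection(node, {"fqdn": None}))  # {'fqdn': 'sw-01.lab'}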
infrahub/graphql/queries/diff/tree.py CHANGED
@@ -10,6 +10,7 @@ from infrahub.core import registry
 from infrahub.core.constants import DiffAction, RelationshipCardinality
 from infrahub.core.constants.database import DatabaseEdgeType
 from infrahub.core.diff.model.path import NameTrackingId
+from infrahub.core.diff.query.filters import EnrichedDiffQueryFilters
 from infrahub.core.diff.repository.repository import DiffRepository
 from infrahub.core.query.diff import DiffCountChanges
 from infrahub.core.timestamp import Timestamp
@@ -415,7 +416,7 @@ class DiffTreeResolver:
            diff_branch_names=[diff_branch.name],
            from_time=from_timestamp,
            to_time=to_timestamp,
-            filters=filters_dict,
+            filters=EnrichedDiffQueryFilters(**filters_dict),
            include_parents=include_parents,
            limit=limit,
            offset=offset,
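The resolver now validates the raw filter dict through the EnrichedDiffQueryFilters model before handing it to DiffRepository, so malformed filters fail at the GraphQL boundary rather than deep inside the query builder. A toy analogue of the dict-to-model move (ToyFilters is a stand-in; the model's real fields are not shown in this diff):

    from pydantic import BaseModel

    class ToyFilters(BaseModel):
        ids: list[str] = []

    filters = ToyFilters(**{"ids": ["a1", "b2"]})  # validated model instead of a bare dict
    print(filters.ids)  # ['a1', 'b2']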
infrahub/message_bus/operations/requests/proposed_change.py CHANGED
@@ -319,6 +319,9 @@ query GatherArtifactDefinitions {
                    file_path {
                        value
                    }
+                    convert_query_response {
+                        value
+                    }
                }
                repository {
                    node {
@@ -526,6 +529,9 @@ def _parse_artifact_definitions(definitions: list[dict]) -> list[ProposedChangeA
        elif artifact_definition.transform_kind == InfrahubKind.TRANSFORMPYTHON:
            artifact_definition.class_name = definition["node"]["transformation"]["node"]["class_name"]["value"]
            artifact_definition.file_path = definition["node"]["transformation"]["node"]["file_path"]["value"]
+            artifact_definition.convert_query_response = definition["node"]["transformation"]["node"][
+                "convert_query_response"
+            ]["value"]
 
        parsed.append(artifact_definition)
 
infrahub/message_bus/types.py CHANGED
@@ -96,6 +96,9 @@ class ProposedChangeArtifactDefinition(BaseModel):
     class_name: str = Field(default="")
     content_type: str
     file_path: str = Field(default="")
+    convert_query_response: bool = Field(
+        default=False, description="Convert query response to InfrahubNode objects for Python based transforms"
+    )
     timeout: int
 
     @property
infrahub/patch/queries/consolidate_duplicated_nodes.py ADDED
@@ -0,0 +1,109 @@
+from ..models import EdgeToAdd, EdgeToDelete, PatchPlan, VertexToDelete
+from .base import PatchQuery
+
+
+class ConsolidateDuplicatedNodesPatchQuery(PatchQuery):
+    """
+    Find any groups of nodes with the same labels and properties, move all the edges to one of the duplicated nodes,
+    then delete the other duplicated nodes
+    """
+
+    @property
+    def name(self) -> str:
+        return "consolidate-duplicated-nodes"
+
+    async def plan(self) -> PatchPlan:
+        query = """
+        //------------
+        // Find nodes with the same labels and UUID
+        //------------
+        MATCH (n:Node)
+        WITH n.uuid AS node_uuid, count(*) as num_nodes_with_uuid
+        WHERE num_nodes_with_uuid > 1
+        WITH DISTINCT node_uuid
+        MATCH (n:Node {uuid: node_uuid})
+        CALL {
+            WITH n
+            WITH labels(n) AS n_labels
+            UNWIND n_labels AS n_label
+            WITH n_label
+            ORDER BY n_label ASC
+            RETURN collect(n_label) AS sorted_labels
+        }
+        WITH n.uuid AS n_uuid, sorted_labels, collect(n) AS duplicate_nodes
+        WHERE size(duplicate_nodes) > 1
+        WITH n_uuid, head(duplicate_nodes) AS node_to_keep, tail(duplicate_nodes) AS nodes_to_delete
+        UNWIND nodes_to_delete AS node_to_delete
+        //------------
+        // Find the edges that we need to move to the selected node_to_keep
+        //------------
+        CALL {
+            WITH node_to_keep, node_to_delete
+            MATCH (node_to_delete)-[edge_to_delete]->(peer)
+            RETURN {
+                from_id: %(id_func_name)s(node_to_keep),
+                to_id: %(id_func_name)s(peer),
+                edge_type: type(edge_to_delete),
+                after_props: properties(edge_to_delete)
+            } AS edge_to_create
+            UNION
+            WITH node_to_keep, node_to_delete
+            MATCH (node_to_delete)<-[edge_to_delete]-(peer)
+            RETURN {
+                from_id: %(id_func_name)s(peer),
+                to_id: %(id_func_name)s(node_to_keep),
+                edge_type: type(edge_to_delete),
+                after_props: properties(edge_to_delete)
+            } AS edge_to_create
+        }
+        WITH node_to_delete, collect(edge_to_create) AS edges_to_create
+        //------------
+        // Find the edges that we need to remove from the duplicated nodes
+        //------------
+        CALL {
+            WITH node_to_delete
+            MATCH (node_to_delete)-[e]->(peer)
+            RETURN {
+                db_id: %(id_func_name)s(e),
+                from_id: %(id_func_name)s(node_to_delete),
+                to_id: %(id_func_name)s(peer),
+                edge_type: type(e),
+                before_props: properties(e)
+            } AS edge_to_delete
+            UNION
+            WITH node_to_delete
+            MATCH (node_to_delete)<-[e]-(peer)
+            RETURN {
+                db_id: %(id_func_name)s(e),
+                from_id: %(id_func_name)s(peer),
+                to_id: %(id_func_name)s(node_to_delete),
+                edge_type: type(e),
+                before_props: properties(e)
+            } AS edge_to_delete
+        }
+        WITH node_to_delete, edges_to_create, collect(edge_to_delete) AS edges_to_delete
+        RETURN
+        {db_id: %(id_func_name)s(node_to_delete), labels: labels(node_to_delete), before_props: properties(node_to_delete)} AS vertex_to_delete,
+        edges_to_create,
+        edges_to_delete
+        """ % {"id_func_name": self.db.get_id_function_name()}
+        results = await self.db.execute_query(query=query)
+        vertices_to_delete: list[VertexToDelete] = []
+        edges_to_delete: list[EdgeToDelete] = []
+        edges_to_add: list[EdgeToAdd] = []
+        for result in results:
+            serial_vertex_to_delete = result.get("vertex_to_delete")
+            if serial_vertex_to_delete:
+                vertex_to_delete = VertexToDelete(**serial_vertex_to_delete)
+                vertices_to_delete.append(vertex_to_delete)
+            for serial_edge_to_delete in result.get("edges_to_delete"):
+                edge_to_delete = EdgeToDelete(**serial_edge_to_delete)
+                edges_to_delete.append(edge_to_delete)
+            for serial_edge_to_create in result.get("edges_to_create"):
+                edges_to_add.append(EdgeToAdd(**serial_edge_to_create))
+        return PatchPlan(
+            name=self.name,
+            vertices_to_delete=vertices_to_delete,
+            edges_to_add=edges_to_add,
+            edges_to_delete=edges_to_delete,
+        )
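This new patch query only plans: it reads the graph and returns a PatchPlan naming the duplicate vertices to drop, the edges to re-home onto the kept node, and the edges to delete. A hedged usage sketch (run_plan and the constructor shape are assumptions; only plan() appears in this diff):

    async def run_plan(db) -> None:
        patch = ConsolidateDuplicatedNodesPatchQuery(db=db)  # constructor shape assumed
        plan = await patch.plan()
        print(
            f"{plan.name}: {len(plan.vertices_to_delete)} duplicate vertices, "
            f"{len(plan.edges_to_add)} edges to re-home, "
            f"{len(plan.edges_to_delete)} edges to delete"
        )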