infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (183)
  1. infrahub/actions/tasks.py +8 -0
  2. infrahub/api/diff/diff.py +1 -1
  3. infrahub/api/internal.py +2 -0
  4. infrahub/api/oauth2.py +13 -19
  5. infrahub/api/oidc.py +15 -21
  6. infrahub/api/schema.py +24 -3
  7. infrahub/artifacts/models.py +2 -1
  8. infrahub/auth.py +137 -3
  9. infrahub/cli/__init__.py +2 -0
  10. infrahub/cli/db.py +103 -98
  11. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  12. infrahub/cli/dev.py +118 -0
  13. infrahub/cli/tasks.py +46 -0
  14. infrahub/cli/upgrade.py +30 -3
  15. infrahub/computed_attribute/tasks.py +20 -8
  16. infrahub/core/attribute.py +13 -5
  17. infrahub/core/branch/enums.py +1 -1
  18. infrahub/core/branch/models.py +7 -3
  19. infrahub/core/branch/tasks.py +70 -8
  20. infrahub/core/changelog/models.py +4 -12
  21. infrahub/core/constants/__init__.py +3 -0
  22. infrahub/core/constants/infrahubkind.py +1 -0
  23. infrahub/core/diff/model/path.py +4 -0
  24. infrahub/core/diff/payload_builder.py +1 -1
  25. infrahub/core/diff/query/artifact.py +1 -0
  26. infrahub/core/diff/query/field_summary.py +1 -0
  27. infrahub/core/graph/__init__.py +1 -1
  28. infrahub/core/initialization.py +5 -2
  29. infrahub/core/ipam/utilization.py +1 -1
  30. infrahub/core/manager.py +6 -3
  31. infrahub/core/migrations/__init__.py +3 -0
  32. infrahub/core/migrations/exceptions.py +4 -0
  33. infrahub/core/migrations/graph/__init__.py +12 -11
  34. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  35. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  36. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  37. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  38. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  39. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  40. infrahub/core/migrations/query/__init__.py +7 -8
  41. infrahub/core/migrations/query/attribute_add.py +8 -6
  42. infrahub/core/migrations/query/attribute_remove.py +134 -0
  43. infrahub/core/migrations/runner.py +54 -0
  44. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  45. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  46. infrahub/core/migrations/schema/node_attribute_add.py +35 -4
  47. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  48. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  49. infrahub/core/migrations/schema/node_remove.py +2 -1
  50. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  51. infrahub/core/migrations/shared.py +52 -19
  52. infrahub/core/node/__init__.py +158 -51
  53. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  54. infrahub/core/node/create.py +46 -63
  55. infrahub/core/node/lock_utils.py +70 -44
  56. infrahub/core/node/node_property_attribute.py +230 -0
  57. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  58. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  59. infrahub/core/node/resource_manager/number_pool.py +2 -1
  60. infrahub/core/node/standard.py +1 -1
  61. infrahub/core/protocols.py +7 -1
  62. infrahub/core/query/attribute.py +55 -0
  63. infrahub/core/query/ipam.py +1 -0
  64. infrahub/core/query/node.py +23 -4
  65. infrahub/core/query/relationship.py +1 -0
  66. infrahub/core/registry.py +2 -2
  67. infrahub/core/relationship/constraints/count.py +1 -1
  68. infrahub/core/relationship/model.py +1 -1
  69. infrahub/core/schema/__init__.py +56 -0
  70. infrahub/core/schema/attribute_schema.py +4 -0
  71. infrahub/core/schema/basenode_schema.py +42 -2
  72. infrahub/core/schema/definitions/core/__init__.py +2 -0
  73. infrahub/core/schema/definitions/core/generator.py +2 -0
  74. infrahub/core/schema/definitions/core/group.py +16 -2
  75. infrahub/core/schema/definitions/internal.py +16 -3
  76. infrahub/core/schema/generated/attribute_schema.py +2 -2
  77. infrahub/core/schema/generated/base_node_schema.py +6 -1
  78. infrahub/core/schema/manager.py +22 -1
  79. infrahub/core/schema/node_schema.py +5 -2
  80. infrahub/core/schema/schema_branch.py +300 -8
  81. infrahub/core/schema/schema_branch_display.py +123 -0
  82. infrahub/core/schema/schema_branch_hfid.py +114 -0
  83. infrahub/core/validators/aggregated_checker.py +1 -1
  84. infrahub/core/validators/determiner.py +12 -1
  85. infrahub/core/validators/relationship/peer.py +1 -1
  86. infrahub/core/validators/tasks.py +1 -1
  87. infrahub/database/graph.py +21 -0
  88. infrahub/display_labels/__init__.py +0 -0
  89. infrahub/display_labels/gather.py +48 -0
  90. infrahub/display_labels/models.py +240 -0
  91. infrahub/display_labels/tasks.py +192 -0
  92. infrahub/display_labels/triggers.py +22 -0
  93. infrahub/events/branch_action.py +27 -1
  94. infrahub/events/group_action.py +1 -1
  95. infrahub/events/node_action.py +1 -1
  96. infrahub/generators/constants.py +7 -0
  97. infrahub/generators/models.py +7 -0
  98. infrahub/generators/tasks.py +34 -22
  99. infrahub/git/base.py +4 -1
  100. infrahub/git/integrator.py +23 -15
  101. infrahub/git/models.py +2 -1
  102. infrahub/git/repository.py +22 -5
  103. infrahub/git/tasks.py +66 -10
  104. infrahub/git/utils.py +123 -1
  105. infrahub/graphql/analyzer.py +1 -1
  106. infrahub/graphql/api/endpoints.py +14 -4
  107. infrahub/graphql/manager.py +4 -9
  108. infrahub/graphql/mutations/convert_object_type.py +11 -1
  109. infrahub/graphql/mutations/display_label.py +118 -0
  110. infrahub/graphql/mutations/generator.py +25 -7
  111. infrahub/graphql/mutations/hfid.py +125 -0
  112. infrahub/graphql/mutations/ipam.py +54 -35
  113. infrahub/graphql/mutations/main.py +27 -28
  114. infrahub/graphql/mutations/relationship.py +2 -2
  115. infrahub/graphql/mutations/resource_manager.py +2 -2
  116. infrahub/graphql/mutations/schema.py +5 -5
  117. infrahub/graphql/queries/resource_manager.py +1 -1
  118. infrahub/graphql/resolvers/resolver.py +2 -0
  119. infrahub/graphql/schema.py +4 -0
  120. infrahub/graphql/schema_sort.py +170 -0
  121. infrahub/graphql/types/branch.py +4 -1
  122. infrahub/graphql/types/enums.py +3 -0
  123. infrahub/groups/tasks.py +1 -1
  124. infrahub/hfid/__init__.py +0 -0
  125. infrahub/hfid/gather.py +48 -0
  126. infrahub/hfid/models.py +240 -0
  127. infrahub/hfid/tasks.py +191 -0
  128. infrahub/hfid/triggers.py +22 -0
  129. infrahub/lock.py +67 -16
  130. infrahub/message_bus/types.py +2 -1
  131. infrahub/middleware.py +26 -1
  132. infrahub/permissions/constants.py +2 -0
  133. infrahub/proposed_change/tasks.py +35 -17
  134. infrahub/server.py +21 -4
  135. infrahub/services/__init__.py +8 -5
  136. infrahub/services/adapters/http/__init__.py +5 -0
  137. infrahub/services/adapters/workflow/worker.py +14 -3
  138. infrahub/task_manager/event.py +5 -0
  139. infrahub/task_manager/models.py +7 -0
  140. infrahub/task_manager/task.py +73 -0
  141. infrahub/trigger/catalogue.py +4 -0
  142. infrahub/trigger/models.py +2 -0
  143. infrahub/trigger/setup.py +13 -4
  144. infrahub/trigger/tasks.py +6 -0
  145. infrahub/workers/dependencies.py +10 -1
  146. infrahub/workers/infrahub_async.py +10 -2
  147. infrahub/workflows/catalogue.py +80 -0
  148. infrahub/workflows/initialization.py +21 -0
  149. infrahub/workflows/utils.py +2 -1
  150. infrahub_sdk/checks.py +1 -1
  151. infrahub_sdk/client.py +13 -10
  152. infrahub_sdk/config.py +29 -2
  153. infrahub_sdk/ctl/cli_commands.py +2 -0
  154. infrahub_sdk/ctl/generator.py +4 -0
  155. infrahub_sdk/ctl/graphql.py +184 -0
  156. infrahub_sdk/ctl/schema.py +28 -9
  157. infrahub_sdk/generator.py +7 -1
  158. infrahub_sdk/graphql/__init__.py +12 -0
  159. infrahub_sdk/graphql/constants.py +1 -0
  160. infrahub_sdk/graphql/plugin.py +85 -0
  161. infrahub_sdk/graphql/query.py +77 -0
  162. infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
  163. infrahub_sdk/graphql/utils.py +40 -0
  164. infrahub_sdk/protocols.py +14 -0
  165. infrahub_sdk/schema/__init__.py +70 -4
  166. infrahub_sdk/schema/repository.py +8 -0
  167. infrahub_sdk/spec/models.py +7 -0
  168. infrahub_sdk/spec/object.py +53 -44
  169. infrahub_sdk/spec/processors/__init__.py +0 -0
  170. infrahub_sdk/spec/processors/data_processor.py +10 -0
  171. infrahub_sdk/spec/processors/factory.py +34 -0
  172. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  173. infrahub_sdk/spec/range_expansion.py +1 -1
  174. infrahub_sdk/transforms.py +1 -1
  175. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
  176. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
  177. infrahub_testcontainers/container.py +115 -3
  178. infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
  179. infrahub_testcontainers/docker-compose.test.yml +6 -1
  180. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  181. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  182. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  183. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub_sdk/client.py CHANGED
@@ -94,7 +94,9 @@ class ProcessRelationsNodeSync(TypedDict):
     related_nodes: list[InfrahubNodeSync]
 
 
-def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]):  # type: ignore[no-untyped-def]
+def handle_relogin(
+    func: Callable[..., Coroutine[Any, Any, httpx.Response]],
+) -> Callable[..., Coroutine[Any, Any, httpx.Response]]:
     @wraps(func)
     async def wrapper(client: InfrahubClient, *args: Any, **kwargs: Any) -> httpx.Response:
         response = await func(client, *args, **kwargs)
@@ -108,7 +110,7 @@ def handle_relogin(func: Callable[..., Coroutine[Any, Any, httpx.Response]]): #
     return wrapper
 
 
-def handle_relogin_sync(func: Callable[..., httpx.Response]):  # type: ignore[no-untyped-def]
+def handle_relogin_sync(func: Callable[..., httpx.Response]) -> Callable[..., httpx.Response]:
     @wraps(func)
     def wrapper(client: InfrahubClientSync, *args: Any, **kwargs: Any) -> httpx.Response:
         response = func(client, *args, **kwargs)
@@ -170,6 +172,7 @@ class BaseClient:
         self.group_context: InfrahubGroupContext | InfrahubGroupContextSync
         self._initialize()
         self._request_context: RequestContext | None = None
+        _ = self.config.tls_context  # Early load of the TLS context to catch errors
 
     def _initialize(self) -> None:
         """Sets the properties for each version of the client"""
@@ -574,7 +577,7 @@ class InfrahubClient(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
 
         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -694,7 +697,7 @@ class InfrahubClient(BaseClient):
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
             include (list[str], optional): List of attributes or relationships to include in the query.
@@ -791,7 +794,7 @@ class InfrahubClient(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1073,7 +1076,7 @@ class InfrahubClient(BaseClient):
 
         async with httpx.AsyncClient(
             **proxy_config,
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = await client.request(
@@ -1945,7 +1948,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -1992,7 +1995,7 @@ class InfrahubClientSync(BaseClient):
             schema_kind (str): The kind of schema being queried.
             branch (str): The branch name.
             prefetch_relationships (bool): Flag to indicate whether to prefetch relationship data.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
 
         Returns:
             ProcessRelationsNodeSync: A TypedDict containing two lists:
@@ -2084,7 +2087,7 @@ class InfrahubClientSync(BaseClient):
             kind (str): kind of the nodes to query
             at (Timestamp, optional): Time of the query. Defaults to Now.
             branch (str, optional): Name of the branch to query from. Defaults to default_branch.
-            timeout (int, optional): Overrides default timeout used when querying the graphql API. Specified in seconds.
+            timeout (int, optional): Overrides default timeout used when querying the GraphQL API. Specified in seconds.
             populate_store (bool, optional): Flag to indicate whether to populate the store with the retrieved nodes.
             offset (int, optional): The offset for pagination.
             limit (int, optional): The limit for pagination.
@@ -2913,7 +2916,7 @@ class InfrahubClientSync(BaseClient):
 
         with httpx.Client(
             **proxy_config,
-            verify=self.config.tls_ca_file if self.config.tls_ca_file else not self.config.tls_insecure,
+            verify=self.config.tls_context,
         ) as client:
             try:
                 response = client.request(
infrahub_sdk/config.py CHANGED
@@ -1,9 +1,10 @@
 from __future__ import annotations
 
+import ssl
 from copy import deepcopy
 from typing import Any
 
-from pydantic import Field, field_validator, model_validator
+from pydantic import Field, PrivateAttr, field_validator, model_validator
 from pydantic_settings import BaseSettings, SettingsConfigDict
 from typing_extensions import Self
 
@@ -78,6 +79,7 @@ class ConfigBase(BaseSettings):
         Can be useful to test with self-signed certificates.""",
     )
     tls_ca_file: str | None = Field(default=None, description="File path to CA cert or bundle in PEM format")
+    _ssl_context: ssl.SSLContext | None = PrivateAttr(default=None)
 
     @model_validator(mode="before")
     @classmethod
@@ -133,6 +135,28 @@ class ConfigBase(BaseSettings):
     def password_authentication(self) -> bool:
         return bool(self.username)
 
+    @property
+    def tls_context(self) -> ssl.SSLContext:
+        if self._ssl_context:
+            return self._ssl_context
+
+        if self.tls_insecure:
+            self._ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+            self._ssl_context.check_hostname = False
+            self._ssl_context.verify_mode = ssl.CERT_NONE
+            return self._ssl_context
+
+        if self.tls_ca_file:
+            self._ssl_context = ssl.create_default_context(cafile=self.tls_ca_file)
+
+        if self._ssl_context is None:
+            self._ssl_context = ssl.create_default_context()
+
+        return self._ssl_context
+
+    def set_ssl_context(self, context: ssl.SSLContext) -> None:
+        self._ssl_context = context
+
 
 class Config(ConfigBase):
     recorder: RecorderType = Field(default=RecorderType.NONE, description="Select builtin recorder for later replay.")
@@ -174,4 +198,7 @@ class Config(ConfigBase):
             if field not in covered_keys:
                 config[field] = deepcopy(getattr(self, field))
 
-        return Config(**config)
+        new_config = Config(**config)
+        if self._ssl_context:
+            new_config.set_ssl_context(self._ssl_context)
+        return new_config
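
Note: combined with the client.py change above (httpx clients are now created with verify=self.config.tls_context), TLS settings resolve once into a cached ssl.SSLContext: tls_insecure wins, then tls_ca_file, then the system trust store. A minimal sketch of how a caller might use this; the certificate paths and the mutual-TLS part are illustrative assumptions, not part of the diff:

```python
import ssl

from infrahub_sdk import Config, InfrahubClient

# tls_ca_file seeds the context; tls_insecure would disable verification instead.
config = Config(address="http://localhost:8000", tls_ca_file="/etc/ssl/certs/internal-ca.pem")
assert isinstance(config.tls_context, ssl.SSLContext)

# A pre-built context can also be injected, e.g. for mutual TLS (hypothetical usage).
custom = ssl.create_default_context(cafile="/etc/ssl/certs/internal-ca.pem")
custom.load_cert_chain(certfile="client.pem", keyfile="client.key")
config.set_ssl_context(custom)

client = InfrahubClient(config=config)  # picks up config.tls_context for every httpx client it builds
```
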
infrahub_sdk/ctl/cli_commands.py CHANGED
@@ -25,6 +25,7 @@ from ..ctl.check import run as run_check
 from ..ctl.client import initialize_client, initialize_client_sync
 from ..ctl.exceptions import QueryNotFoundError
 from ..ctl.generator import run as run_generator
+from ..ctl.graphql import app as graphql_app
 from ..ctl.menu import app as menu_app
 from ..ctl.object import app as object_app
 from ..ctl.render import list_jinja2_transforms, print_template_errors
@@ -62,6 +63,7 @@ app.add_typer(validate_app, name="validate")
 app.add_typer(repository_app, name="repository")
 app.add_typer(menu_app, name="menu")
 app.add_typer(object_app, name="object")
+app.add_typer(graphql_app, name="graphql")
 
 app.command(name="dump")(dump)
 app.command(name="load")(load)
infrahub_sdk/ctl/generator.py CHANGED
@@ -64,6 +64,8 @@ async def run(
         branch=branch or "",
         params=variables_dict,
         convert_query_response=generator_config.convert_query_response,
+        execute_in_proposed_change=generator_config.execute_in_proposed_change,
+        execute_after_merge=generator_config.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     await generator._init_client.schema.all(branch=generator.branch_name)
@@ -93,6 +95,8 @@ async def run(
         branch=branch or "",
         params=params,
         convert_query_response=generator_config.convert_query_response,
+        execute_in_proposed_change=generator_config.execute_in_proposed_change,
+        execute_after_merge=generator_config.execute_after_merge,
         infrahub_node=InfrahubNode,
     )
     data = execute_graphql_query(
infrahub_sdk/ctl/graphql.py ADDED
@@ -0,0 +1,184 @@
+from __future__ import annotations
+
+import ast
+from collections import defaultdict
+from pathlib import Path
+from typing import Optional
+
+import typer
+from ariadne_codegen.client_generators.package import PackageGenerator, get_package_generator
+from ariadne_codegen.exceptions import ParsingError
+from ariadne_codegen.plugins.explorer import get_plugins_types
+from ariadne_codegen.plugins.manager import PluginManager
+from ariadne_codegen.schema import (
+    filter_fragments_definitions,
+    filter_operations_definitions,
+    get_graphql_schema_from_path,
+)
+from ariadne_codegen.settings import ClientSettings, CommentsStrategy
+from ariadne_codegen.utils import ast_to_str
+from graphql import DefinitionNode, GraphQLSchema, NoUnusedFragmentsRule, parse, specified_rules, validate
+from rich.console import Console
+
+from ..async_typer import AsyncTyper
+from ..ctl.client import initialize_client
+from ..ctl.utils import catch_exception
+from ..graphql.utils import insert_fragments_inline, remove_fragment_import
+from .parameters import CONFIG_PARAM
+
+app = AsyncTyper()
+console = Console()
+
+ARIADNE_PLUGINS = [
+    "infrahub_sdk.graphql.plugin.PydanticBaseModelPlugin",
+    "infrahub_sdk.graphql.plugin.FutureAnnotationPlugin",
+    "infrahub_sdk.graphql.plugin.StandardTypeHintPlugin",
+]
+
+
+def find_gql_files(query_path: Path) -> list[Path]:
+    """
+    Find all files with .gql extension in the specified directory.
+
+    Args:
+        query_path: Path to the directory to search for .gql files
+
+    Returns:
+        List of Path objects for all .gql files found
+    """
+    if not query_path.exists():
+        raise FileNotFoundError(f"File or directory not found: {query_path}")
+
+    if not query_path.is_dir() and query_path.is_file():
+        return [query_path]
+
+    return list(query_path.glob("**/*.gql"))
+
+
+def get_graphql_query(queries_path: Path, schema: GraphQLSchema) -> tuple[DefinitionNode, ...]:
+    """Get GraphQL queries definitions from a single GraphQL file."""
+
+    if not queries_path.exists():
+        raise FileNotFoundError(f"File not found: {queries_path}")
+    if not queries_path.is_file():
+        raise ValueError(f"{queries_path} is not a file")
+
+    queries_str = queries_path.read_text(encoding="utf-8")
+    queries_ast = parse(queries_str)
+    validation_errors = validate(
+        schema=schema,
+        document_ast=queries_ast,
+        rules=[r for r in specified_rules if r is not NoUnusedFragmentsRule],
+    )
+    if validation_errors:
+        raise ValueError("\n\n".join(error.message for error in validation_errors))
+    return queries_ast.definitions
+
+
+def generate_result_types(directory: Path, package: PackageGenerator, fragment: ast.Module) -> None:
+    for file_name, module in package._result_types_files.items():
+        file_path = directory / file_name
+
+        insert_fragments_inline(module, fragment)
+        remove_fragment_import(module)
+
+        code = package._add_comments_to_code(ast_to_str(module), package.queries_source)
+        if package.plugin_manager:
+            code = package.plugin_manager.generate_result_types_code(code)
+        file_path.write_text(code)
+        package._generated_files.append(file_path.name)
+
+
+@app.callback()
+def callback() -> None:
+    """
+    Various GraphQL related commands.
+    """
+
+
+@app.command()
+@catch_exception(console=console)
+async def export_schema(
+    destination: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Export the GraphQL schema to a file."""
+
+    client = initialize_client()
+    schema_text = await client.schema.get_graphql_schema()
+
+    destination.parent.mkdir(parents=True, exist_ok=True)
+    destination.write_text(schema_text)
+    console.print(f"[green]Schema exported to {destination}")
+
+
+@app.command()
+@catch_exception(console=console)
+async def generate_return_types(
+    query: Optional[Path] = typer.Argument(
+        None, help="Location of the GraphQL query file(s). Defaults to current directory if not specified."
+    ),
+    schema: Path = typer.Option("schema.graphql", help="Path to the GraphQL schema file."),
+    _: str = CONFIG_PARAM,
+) -> None:
+    """Create Pydantic Models for GraphQL query return types"""
+
+    query = Path.cwd() if query is None else query
+
+    # Load the GraphQL schema
+    if not schema.exists():
+        raise FileNotFoundError(f"GraphQL Schema file not found: {schema}")
+    graphql_schema = get_graphql_schema_from_path(schema_path=str(schema))
+
+    # Initialize the plugin manager
+    plugin_manager = PluginManager(
+        schema=graphql_schema,
+        plugins_types=get_plugins_types(plugins_strs=ARIADNE_PLUGINS),
+    )
+
+    # Find the GraphQL files and organize them by directory
+    gql_files = find_gql_files(query)
+    gql_per_directory: dict[Path, list[Path]] = defaultdict(list)
+    for gql_file in gql_files:
+        gql_per_directory[gql_file.parent].append(gql_file)
+
+    # Generate the Pydantic Models for the GraphQL queries
+    for directory, gql_files in gql_per_directory.items():
+        for gql_file in gql_files:
+            try:
+                definitions = get_graphql_query(queries_path=gql_file, schema=graphql_schema)
+            except ValueError as exc:
+                console.print(f"[red]Error generating result types for {gql_file}: {exc}")
+                continue
+            queries = filter_operations_definitions(definitions)
+            fragments = filter_fragments_definitions(definitions)
+
+            package_generator = get_package_generator(
+                schema=graphql_schema,
+                fragments=fragments,
+                settings=ClientSettings(
+                    schema_path=str(schema),
+                    target_package_name=directory.name,
+                    queries_path=str(directory),
+                    include_comments=CommentsStrategy.NONE,
+                ),
+                plugin_manager=plugin_manager,
+            )
+
+            parsing_failed = False
+            try:
+                for query_operation in queries:
+                    package_generator.add_operation(query_operation)
+            except ParsingError as exc:
+                console.print(f"[red]Unable to process {gql_file.name}: {exc}")
+                parsing_failed = True
+
+            if parsing_failed:
+                continue
+
+            module_fragment = package_generator.fragments_generator.generate()
+
+            generate_result_types(directory=directory, package=package_generator, fragment=module_fragment)
+
+            for file_name in package_generator._result_types_files.keys():
+                console.print(f"[green]Generated {file_name} in {directory}")
infrahub_sdk/ctl/schema.py CHANGED
@@ -14,6 +14,7 @@ from ..async_typer import AsyncTyper
 from ..ctl.client import initialize_client
 from ..ctl.utils import catch_exception, init_logging
 from ..queries import SCHEMA_HASH_SYNC_STATUS
+from ..schema import SchemaWarning
 from ..yaml import SchemaFile
 from .parameters import CONFIG_PARAM
 from .utils import load_yamlfile_from_disk_and_exit
@@ -73,7 +74,9 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
             loc_type = loc_path[-1]
             input_str = error.get("input", None)
             error_message = f"{loc_type} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}")
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
         elif len(loc_path) > 6:
             loc_type = loc_path[5]
@@ -91,7 +94,9 @@ def display_schema_load_errors(response: dict[str, Any], schemas_data: list[Sche
 
             input_str = error.get("input", None)
             error_message = f"{loc_type[:-1].title()}: {input_label} ({input_str}) | {error['msg']} ({error['type']})"
-            console.print(f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}")
+            console.print(
+                f" Node: {node.get('namespace', None)}{node.get('name', None)} | {error_message}", markup=False
+            )
 
 
 def handle_non_detail_errors(response: dict[str, Any]) -> None:
@@ -148,6 +153,8 @@ async def load(
 
     console.print(f"[green] {len(schemas_data)} {schema_definition} processed in {loading_time:.3f} seconds.")
 
+    _display_schema_warnings(console=console, warnings=response.warnings)
+
     if response.schema_updated and wait:
         waited = 0
         continue_waiting = True
@@ -183,12 +190,24 @@ async def check(
 
     success, response = await client.schema.check(schemas=[item.payload for item in schemas_data], branch=branch)
 
-    if not success:
+    if not success or not response:
         display_schema_load_errors(response=response or {}, schemas_data=schemas_data)
+        return
+
+    for schema_file in schemas_data:
+        console.print(f"[green] schema '{schema_file.location}' is Valid!")
+
+    warnings = response.pop("warnings", [])
+    schema_warnings = [SchemaWarning.model_validate(warning) for warning in warnings]
+    _display_schema_warnings(console=console, warnings=schema_warnings)
+    if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
+        print("No diff")
     else:
-        for schema_file in schemas_data:
-            console.print(f"[green] schema '{schema_file.location}' is Valid!")
-        if response == {"diff": {"added": {}, "changed": {}, "removed": {}}}:
-            print("No diff")
-        else:
-            print(yaml.safe_dump(data=response, indent=4))
+        print(yaml.safe_dump(data=response, indent=4))
+
+
+def _display_schema_warnings(console: Console, warnings: list[SchemaWarning]) -> None:
+    for warning in warnings:
+        console.print(
+            f"[yellow] {warning.type.value}: {warning.message} [{', '.join([kind.display for kind in warning.kinds])}]"
+        )
infrahub_sdk/generator.py CHANGED
@@ -26,6 +26,8 @@ class InfrahubGenerator(InfrahubOperation):
         generator_instance: str = "",
         params: dict | None = None,
         convert_query_response: bool = False,
+        execute_in_proposed_change: bool = True,
+        execute_after_merge: bool = True,
         logger: logging.Logger | None = None,
         request_context: RequestContext | None = None,
     ) -> None:
@@ -44,6 +46,8 @@ class InfrahubGenerator(InfrahubOperation):
         self._client: InfrahubClient | None = None
         self.logger = logger if logger else logging.getLogger("infrahub.tasks")
         self.request_context = request_context
+        self.execute_in_proposed_change = execute_in_proposed_change
+        self.execute_after_merge = execute_after_merge
 
     @property
     def subscribers(self) -> list[str] | None:
@@ -81,8 +85,10 @@ class InfrahubGenerator(InfrahubOperation):
         unpacked = data.get("data") or data
         await self.process_nodes(data=unpacked)
 
+        group_type = "CoreGeneratorGroup" if self.execute_after_merge else "CoreGeneratorAwareGroup"
+
         async with self._init_client.start_tracking(
-            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type="CoreGeneratorGroup"
+            identifier=identifier, params=self.params, delete_unused_nodes=True, group_type=group_type
         ) as self.client:
             await self.generate(data=unpacked)
 
infrahub_sdk/graphql/__init__.py ADDED
@@ -0,0 +1,12 @@
+from .constants import VARIABLE_TYPE_MAPPING
+from .query import Mutation, Query
+from .renderers import render_input_block, render_query_block, render_variables_to_string
+
+__all__ = [
+    "VARIABLE_TYPE_MAPPING",
+    "Mutation",
+    "Query",
+    "render_input_block",
+    "render_query_block",
+    "render_variables_to_string",
+]
infrahub_sdk/graphql/constants.py ADDED
@@ -0,0 +1 @@
+VARIABLE_TYPE_MAPPING = ((str, "String!"), (int, "Int!"), (float, "Float!"), (bool, "Boolean!"))
infrahub_sdk/graphql/plugin.py ADDED
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+import ast
+from typing import TYPE_CHECKING
+
+from ariadne_codegen.plugins.base import Plugin
+
+if TYPE_CHECKING:
+    from graphql import ExecutableDefinitionNode
+
+
+class FutureAnnotationPlugin(Plugin):
+    @staticmethod
+    def insert_future_annotation(module: ast.Module) -> ast.Module:
+        # First check if the future annotation is already present
+        for item in module.body:
+            if isinstance(item, ast.ImportFrom) and item.module == "__future__":
+                if any(alias.name == "annotations" for alias in item.names):
+                    return module
+
+        module.body.insert(0, ast.ImportFrom(module="__future__", names=[ast.alias(name="annotations")], level=0))
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        return self.insert_future_annotation(module)
+
+
+class StandardTypeHintPlugin(Plugin):
+    @classmethod
+    def replace_list_in_subscript(cls, subscript: ast.Subscript) -> ast.Subscript:
+        if isinstance(subscript.value, ast.Name) and subscript.value.id == "List":
+            subscript.value.id = "list"
+        if isinstance(subscript.slice, ast.Subscript):
+            subscript.slice = cls.replace_list_in_subscript(subscript.slice)
+
+        return subscript
+
+    @classmethod
+    def replace_list_annotations(cls, module: ast.Module) -> ast.Module:
+        for item in module.body:
+            if not isinstance(item, ast.ClassDef):
+                continue
+
+            # replace List with list in the annotations when list is used as a type
+            for class_item in item.body:
+                if not isinstance(class_item, ast.AnnAssign):
+                    continue
+                if isinstance(class_item.annotation, ast.Subscript):
+                    class_item.annotation = cls.replace_list_in_subscript(class_item.annotation)
+
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        module = FutureAnnotationPlugin.insert_future_annotation(module)
+        return self.replace_list_annotations(module)
+
+
+class PydanticBaseModelPlugin(Plugin):
+    @staticmethod
+    def find_base_model_index(module: ast.Module) -> int:
+        for idx, item in enumerate(module.body):
+            if isinstance(item, ast.ImportFrom) and item.module == "base_model":
+                return idx
+        raise ValueError("BaseModel not found in module")
+
+    @classmethod
+    def replace_base_model_import(cls, module: ast.Module) -> ast.Module:
+        base_model_index = cls.find_base_model_index(module)
+        module.body[base_model_index] = ast.ImportFrom(module="pydantic", names=[ast.alias(name="BaseModel")], level=0)
+        return module
+
+    def generate_result_types_module(
+        self,
+        module: ast.Module,
+        operation_definition: ExecutableDefinitionNode,  # noqa: ARG002
+    ) -> ast.Module:
+        return self.replace_base_model_import(module)
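
Note: these plugins rewrite the generated result-type modules at the AST level before they are written to disk. A small sketch of what the first two do to a module, using only the static/class methods shown above (requires Python 3.9+ for ast.unparse; the sample source is made up):

```python
import ast

from infrahub_sdk.graphql.plugin import FutureAnnotationPlugin, StandardTypeHintPlugin

source = "from typing import List\n\nclass Tags:\n    names: List[List[str]]\n"
module = ast.parse(source)

# Prepend `from __future__ import annotations` if missing, then swap typing.List
# subscripts for the builtin list in class-level annotations.
module = FutureAnnotationPlugin.insert_future_annotation(module)
module = StandardTypeHintPlugin.replace_list_annotations(module)

print(ast.unparse(module))  # annotation is now list[list[str]], with the future import first
```
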
infrahub_sdk/graphql/query.py ADDED
@@ -0,0 +1,77 @@
+from __future__ import annotations
+
+from typing import Any
+
+from .renderers import render_input_block, render_query_block, render_variables_to_string
+
+
+class BaseGraphQLQuery:
+    query_type: str = "not-defined"
+    indentation: int = 4
+
+    def __init__(self, query: dict, variables: dict | None = None, name: str | None = None):
+        self.query = query
+        self.variables = variables
+        self.name = name or ""
+
+    def render_first_line(self) -> str:
+        first_line = self.query_type
+
+        if self.name:
+            first_line += " " + self.name
+
+        if self.variables:
+            first_line += f" ({render_variables_to_string(self.variables)})"
+
+        first_line += " {"
+
+        return first_line
+
+
+class Query(BaseGraphQLQuery):
+    query_type = "query"
+
+    def render(self, convert_enum: bool = False) -> str:
+        lines = [self.render_first_line()]
+        lines.extend(
+            render_query_block(
+                data=self.query, indentation=self.indentation, offset=self.indentation, convert_enum=convert_enum
+            )
+        )
+        lines.append("}")
+
+        return "\n" + "\n".join(lines) + "\n"
+
+
+class Mutation(BaseGraphQLQuery):
+    query_type = "mutation"
+
+    def __init__(self, *args: Any, mutation: str, input_data: dict, **kwargs: Any):
+        self.input_data = input_data
+        self.mutation = mutation
+        super().__init__(*args, **kwargs)
+
+    def render(self, convert_enum: bool = False) -> str:
+        lines = [self.render_first_line()]
+        lines.append(" " * self.indentation + f"{self.mutation}(")
+        lines.extend(
+            render_input_block(
+                data=self.input_data,
+                indentation=self.indentation,
+                offset=self.indentation * 2,
+                convert_enum=convert_enum,
+            )
+        )
+        lines.append(" " * self.indentation + "){")
+        lines.extend(
+            render_query_block(
+                data=self.query,
+                indentation=self.indentation,
+                offset=self.indentation * 2,
+                convert_enum=convert_enum,
+            )
+        )
+        lines.append(" " * self.indentation + "}")
+        lines.append("}")
+
+        return "\n" + "\n".join(lines) + "\n"