infrahub-server 1.5.0b0__py3-none-any.whl → 1.5.0b2__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (183)
  1. infrahub/actions/tasks.py +8 -0
  2. infrahub/api/diff/diff.py +1 -1
  3. infrahub/api/internal.py +2 -0
  4. infrahub/api/oauth2.py +13 -19
  5. infrahub/api/oidc.py +15 -21
  6. infrahub/api/schema.py +24 -3
  7. infrahub/artifacts/models.py +2 -1
  8. infrahub/auth.py +137 -3
  9. infrahub/cli/__init__.py +2 -0
  10. infrahub/cli/db.py +103 -98
  11. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +212 -0
  12. infrahub/cli/dev.py +118 -0
  13. infrahub/cli/tasks.py +46 -0
  14. infrahub/cli/upgrade.py +30 -3
  15. infrahub/computed_attribute/tasks.py +20 -8
  16. infrahub/core/attribute.py +13 -5
  17. infrahub/core/branch/enums.py +1 -1
  18. infrahub/core/branch/models.py +7 -3
  19. infrahub/core/branch/tasks.py +70 -8
  20. infrahub/core/changelog/models.py +4 -12
  21. infrahub/core/constants/__init__.py +3 -0
  22. infrahub/core/constants/infrahubkind.py +1 -0
  23. infrahub/core/diff/model/path.py +4 -0
  24. infrahub/core/diff/payload_builder.py +1 -1
  25. infrahub/core/diff/query/artifact.py +1 -0
  26. infrahub/core/diff/query/field_summary.py +1 -0
  27. infrahub/core/graph/__init__.py +1 -1
  28. infrahub/core/initialization.py +5 -2
  29. infrahub/core/ipam/utilization.py +1 -1
  30. infrahub/core/manager.py +6 -3
  31. infrahub/core/migrations/__init__.py +3 -0
  32. infrahub/core/migrations/exceptions.py +4 -0
  33. infrahub/core/migrations/graph/__init__.py +12 -11
  34. infrahub/core/migrations/graph/load_schema_branch.py +21 -0
  35. infrahub/core/migrations/graph/m013_convert_git_password_credential.py +1 -1
  36. infrahub/core/migrations/graph/m040_duplicated_attributes.py +81 -0
  37. infrahub/core/migrations/graph/m041_profile_attrs_in_db.py +145 -0
  38. infrahub/core/migrations/graph/m042_create_hfid_display_label_in_db.py +164 -0
  39. infrahub/core/migrations/graph/m043_backfill_hfid_display_label_in_db.py +866 -0
  40. infrahub/core/migrations/query/__init__.py +7 -8
  41. infrahub/core/migrations/query/attribute_add.py +8 -6
  42. infrahub/core/migrations/query/attribute_remove.py +134 -0
  43. infrahub/core/migrations/runner.py +54 -0
  44. infrahub/core/migrations/schema/attribute_kind_update.py +9 -3
  45. infrahub/core/migrations/schema/attribute_supports_profile.py +90 -0
  46. infrahub/core/migrations/schema/node_attribute_add.py +35 -4
  47. infrahub/core/migrations/schema/node_attribute_remove.py +13 -109
  48. infrahub/core/migrations/schema/node_kind_update.py +2 -1
  49. infrahub/core/migrations/schema/node_remove.py +2 -1
  50. infrahub/core/migrations/schema/placeholder_dummy.py +3 -2
  51. infrahub/core/migrations/shared.py +52 -19
  52. infrahub/core/node/__init__.py +158 -51
  53. infrahub/core/node/constraints/attribute_uniqueness.py +3 -1
  54. infrahub/core/node/create.py +46 -63
  55. infrahub/core/node/lock_utils.py +70 -44
  56. infrahub/core/node/node_property_attribute.py +230 -0
  57. infrahub/core/node/resource_manager/ip_address_pool.py +2 -1
  58. infrahub/core/node/resource_manager/ip_prefix_pool.py +2 -1
  59. infrahub/core/node/resource_manager/number_pool.py +2 -1
  60. infrahub/core/node/standard.py +1 -1
  61. infrahub/core/protocols.py +7 -1
  62. infrahub/core/query/attribute.py +55 -0
  63. infrahub/core/query/ipam.py +1 -0
  64. infrahub/core/query/node.py +23 -4
  65. infrahub/core/query/relationship.py +1 -0
  66. infrahub/core/registry.py +2 -2
  67. infrahub/core/relationship/constraints/count.py +1 -1
  68. infrahub/core/relationship/model.py +1 -1
  69. infrahub/core/schema/__init__.py +56 -0
  70. infrahub/core/schema/attribute_schema.py +4 -0
  71. infrahub/core/schema/basenode_schema.py +42 -2
  72. infrahub/core/schema/definitions/core/__init__.py +2 -0
  73. infrahub/core/schema/definitions/core/generator.py +2 -0
  74. infrahub/core/schema/definitions/core/group.py +16 -2
  75. infrahub/core/schema/definitions/internal.py +16 -3
  76. infrahub/core/schema/generated/attribute_schema.py +2 -2
  77. infrahub/core/schema/generated/base_node_schema.py +6 -1
  78. infrahub/core/schema/manager.py +22 -1
  79. infrahub/core/schema/node_schema.py +5 -2
  80. infrahub/core/schema/schema_branch.py +300 -8
  81. infrahub/core/schema/schema_branch_display.py +123 -0
  82. infrahub/core/schema/schema_branch_hfid.py +114 -0
  83. infrahub/core/validators/aggregated_checker.py +1 -1
  84. infrahub/core/validators/determiner.py +12 -1
  85. infrahub/core/validators/relationship/peer.py +1 -1
  86. infrahub/core/validators/tasks.py +1 -1
  87. infrahub/database/graph.py +21 -0
  88. infrahub/display_labels/__init__.py +0 -0
  89. infrahub/display_labels/gather.py +48 -0
  90. infrahub/display_labels/models.py +240 -0
  91. infrahub/display_labels/tasks.py +192 -0
  92. infrahub/display_labels/triggers.py +22 -0
  93. infrahub/events/branch_action.py +27 -1
  94. infrahub/events/group_action.py +1 -1
  95. infrahub/events/node_action.py +1 -1
  96. infrahub/generators/constants.py +7 -0
  97. infrahub/generators/models.py +7 -0
  98. infrahub/generators/tasks.py +34 -22
  99. infrahub/git/base.py +4 -1
  100. infrahub/git/integrator.py +23 -15
  101. infrahub/git/models.py +2 -1
  102. infrahub/git/repository.py +22 -5
  103. infrahub/git/tasks.py +66 -10
  104. infrahub/git/utils.py +123 -1
  105. infrahub/graphql/analyzer.py +1 -1
  106. infrahub/graphql/api/endpoints.py +14 -4
  107. infrahub/graphql/manager.py +4 -9
  108. infrahub/graphql/mutations/convert_object_type.py +11 -1
  109. infrahub/graphql/mutations/display_label.py +118 -0
  110. infrahub/graphql/mutations/generator.py +25 -7
  111. infrahub/graphql/mutations/hfid.py +125 -0
  112. infrahub/graphql/mutations/ipam.py +54 -35
  113. infrahub/graphql/mutations/main.py +27 -28
  114. infrahub/graphql/mutations/relationship.py +2 -2
  115. infrahub/graphql/mutations/resource_manager.py +2 -2
  116. infrahub/graphql/mutations/schema.py +5 -5
  117. infrahub/graphql/queries/resource_manager.py +1 -1
  118. infrahub/graphql/resolvers/resolver.py +2 -0
  119. infrahub/graphql/schema.py +4 -0
  120. infrahub/graphql/schema_sort.py +170 -0
  121. infrahub/graphql/types/branch.py +4 -1
  122. infrahub/graphql/types/enums.py +3 -0
  123. infrahub/groups/tasks.py +1 -1
  124. infrahub/hfid/__init__.py +0 -0
  125. infrahub/hfid/gather.py +48 -0
  126. infrahub/hfid/models.py +240 -0
  127. infrahub/hfid/tasks.py +191 -0
  128. infrahub/hfid/triggers.py +22 -0
  129. infrahub/lock.py +67 -16
  130. infrahub/message_bus/types.py +2 -1
  131. infrahub/middleware.py +26 -1
  132. infrahub/permissions/constants.py +2 -0
  133. infrahub/proposed_change/tasks.py +35 -17
  134. infrahub/server.py +21 -4
  135. infrahub/services/__init__.py +8 -5
  136. infrahub/services/adapters/http/__init__.py +5 -0
  137. infrahub/services/adapters/workflow/worker.py +14 -3
  138. infrahub/task_manager/event.py +5 -0
  139. infrahub/task_manager/models.py +7 -0
  140. infrahub/task_manager/task.py +73 -0
  141. infrahub/trigger/catalogue.py +4 -0
  142. infrahub/trigger/models.py +2 -0
  143. infrahub/trigger/setup.py +13 -4
  144. infrahub/trigger/tasks.py +6 -0
  145. infrahub/workers/dependencies.py +10 -1
  146. infrahub/workers/infrahub_async.py +10 -2
  147. infrahub/workflows/catalogue.py +80 -0
  148. infrahub/workflows/initialization.py +21 -0
  149. infrahub/workflows/utils.py +2 -1
  150. infrahub_sdk/checks.py +1 -1
  151. infrahub_sdk/client.py +13 -10
  152. infrahub_sdk/config.py +29 -2
  153. infrahub_sdk/ctl/cli_commands.py +2 -0
  154. infrahub_sdk/ctl/generator.py +4 -0
  155. infrahub_sdk/ctl/graphql.py +184 -0
  156. infrahub_sdk/ctl/schema.py +28 -9
  157. infrahub_sdk/generator.py +7 -1
  158. infrahub_sdk/graphql/__init__.py +12 -0
  159. infrahub_sdk/graphql/constants.py +1 -0
  160. infrahub_sdk/graphql/plugin.py +85 -0
  161. infrahub_sdk/graphql/query.py +77 -0
  162. infrahub_sdk/{graphql.py → graphql/renderers.py} +81 -73
  163. infrahub_sdk/graphql/utils.py +40 -0
  164. infrahub_sdk/protocols.py +14 -0
  165. infrahub_sdk/schema/__init__.py +70 -4
  166. infrahub_sdk/schema/repository.py +8 -0
  167. infrahub_sdk/spec/models.py +7 -0
  168. infrahub_sdk/spec/object.py +53 -44
  169. infrahub_sdk/spec/processors/__init__.py +0 -0
  170. infrahub_sdk/spec/processors/data_processor.py +10 -0
  171. infrahub_sdk/spec/processors/factory.py +34 -0
  172. infrahub_sdk/spec/processors/range_expand_processor.py +56 -0
  173. infrahub_sdk/spec/range_expansion.py +1 -1
  174. infrahub_sdk/transforms.py +1 -1
  175. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/METADATA +7 -4
  176. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/RECORD +182 -143
  177. infrahub_testcontainers/container.py +115 -3
  178. infrahub_testcontainers/docker-compose-cluster.test.yml +6 -1
  179. infrahub_testcontainers/docker-compose.test.yml +6 -1
  180. infrahub/core/migrations/graph/m040_profile_attrs_in_db.py +0 -166
  181. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/LICENSE.txt +0 -0
  182. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/WHEEL +0 -0
  183. {infrahub_server-1.5.0b0.dist-info → infrahub_server-1.5.0b2.dist-info}/entry_points.txt +0 -0
infrahub/cli/db.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import importlib
 import logging
 import os
 from collections import defaultdict
@@ -16,11 +15,15 @@ from infrahub_sdk.async_typer import AsyncTyper
 from prefect.testing.utilities import prefect_test_harness
 from rich import print as rprint
 from rich.console import Console
-from rich.logging import RichHandler
 from rich.table import Table
 
 from infrahub import config
+from infrahub.auth import AccountSession, AuthType
+from infrahub.context import InfrahubContext
 from infrahub.core import registry
+from infrahub.core.branch import Branch
+from infrahub.core.branch.tasks import rebase_branch
+from infrahub.core.constants import GLOBAL_BRANCH_NAME
 from infrahub.core.graph import GRAPH_VERSION
 from infrahub.core.graph.constraints import ConstraintManagerBase, ConstraintManagerMemgraph, ConstraintManagerNeo4j
 from infrahub.core.graph.index import node_indexes, rel_indexes
@@ -32,28 +35,24 @@ from infrahub.core.graph.schema import (
     GraphRelationshipIsPartOf,
     GraphRelationshipProperties,
 )
-from infrahub.core.initialization import (
-    first_time_initialization,
-    get_root_node,
-    initialization,
-    initialize_registry,
-)
+from infrahub.core.initialization import get_root_node, initialize_registry
+from infrahub.core.migrations.exceptions import MigrationFailureError
 from infrahub.core.migrations.graph import get_graph_migrations, get_migration_by_number
 from infrahub.core.migrations.schema.models import SchemaApplyMigrationData
 from infrahub.core.migrations.schema.tasks import schema_apply_migrations
 from infrahub.core.schema import SchemaRoot, core_models, internal_schema
 from infrahub.core.schema.definitions.deprecated import deprecated_models
 from infrahub.core.schema.manager import SchemaManager
-from infrahub.core.utils import delete_all_nodes
 from infrahub.core.validators.models.validate_migration import SchemaValidateMigrationData
 from infrahub.core.validators.tasks import schema_validate_migrations
 from infrahub.database import DatabaseType
 from infrahub.database.memgraph import IndexManagerMemgraph
 from infrahub.database.neo4j import IndexManagerNeo4j
-from infrahub.log import get_logger
+from infrahub.exceptions import ValidationError
 
 from .constants import ERROR_BADGE, FAILED_BADGE, SUCCESS_BADGE
 from .db_commands.check_inheritance import check_inheritance
+from .db_commands.clean_duplicate_schema_fields import clean_duplicate_schema_fields
 from .patch import patch_app
 
 
@@ -64,7 +63,7 @@ def get_timestamp_string() -> str:
 
 if TYPE_CHECKING:
     from infrahub.cli.context import CliContext
-    from infrahub.core.migrations.shared import ArbitraryMigration, GraphMigration, InternalSchemaMigration
+    from infrahub.core.migrations.shared import MigrationTypes
     from infrahub.database import InfrahubDatabase
     from infrahub.database.index import IndexManagerBase
 
@@ -93,75 +92,43 @@ def callback() -> None:
     """
 
 
-@app.command()
-async def init(
+@app.command(name="migrate")
+async def migrate_cmd(
     ctx: typer.Context,
-    config_file: str = typer.Option(
-        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
-    ),
+    check: bool = typer.Option(False, help="Check the state of the database without applying the migrations."),
+    config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
+    migration_number: int | None = typer.Option(None, help="Apply a specific migration by number"),
 ) -> None:
-    """Erase the content of the database and initialize it with the core schema."""
-
-    log = get_logger()
-
-    # --------------------------------------------------
-    # CLEANUP
-    # - For now we delete everything in the database
-    # TODO, if possible try to implement this in an idempotent way
-    # --------------------------------------------------
-
+    """Check the current format of the internal graph and apply the necessary migrations"""
+    logging.getLogger("infrahub").setLevel(logging.WARNING)
     logging.getLogger("neo4j").setLevel(logging.ERROR)
-    config.load_and_exit(config_file_name=config_file)
-
-    context: CliContext = ctx.obj
-    dbdriver = await context.init_db(retry=1)
-    async with dbdriver.start_transaction() as db:
-        log.info("Delete All Nodes")
-        await delete_all_nodes(db=db)
-        await first_time_initialization(db=db)
-
-    await dbdriver.close()
-
-
-@app.command()
-async def load_test_data(
-    ctx: typer.Context,
-    config_file: str = typer.Option(
-        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
-    ),
-    dataset: str = "dataset01",
-) -> None:
-    """Load test data into the database from the `test_data` directory."""
+    logging.getLogger("prefect").setLevel(logging.ERROR)
 
-    logging.getLogger("neo4j").setLevel(logging.ERROR)
     config.load_and_exit(config_file_name=config_file)
 
     context: CliContext = ctx.obj
     dbdriver = await context.init_db(retry=1)
 
-    async with dbdriver.start_session() as db:
-        await initialization(db=db)
-
-        log_level = "DEBUG"
+    root_node = await get_root_node(db=dbdriver)
+    migrations = await detect_migration_to_run(
+        current_graph_version=root_node.graph_version, migration_number=migration_number
+    )
 
-        FORMAT = "%(message)s"
-        logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
-        logging.getLogger("infrahub")
+    if check or not migrations:
+        return
 
-        dataset_module = importlib.import_module(f"infrahub.test_data.{dataset}")
-        await dataset_module.load_data(db=db)
+    await migrate_database(db=dbdriver, migrations=migrations, initialize=True)
 
     await dbdriver.close()
 
 
-@app.command(name="migrate")
-async def migrate_cmd(
+@app.command(name="check-inheritance")
+async def check_inheritance_cmd(
     ctx: typer.Context,
-    check: bool = typer.Option(False, help="Check the state of the database without applying the migrations."),
+    fix: bool = typer.Option(False, help="Fix the inheritance of any invalid nodes."),
     config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
-    migration_number: int | None = typer.Option(None, help="Apply a specific migration by number"),
 ) -> None:
-    """Check the current format of the internal graph and apply the necessary migrations"""
+    """Check the database for any vertices with incorrect inheritance"""
     logging.getLogger("infrahub").setLevel(logging.WARNING)
     logging.getLogger("neo4j").setLevel(logging.ERROR)
     logging.getLogger("prefect").setLevel(logging.ERROR)
@@ -170,19 +137,22 @@ async def migrate_cmd(
 
     context: CliContext = ctx.obj
     dbdriver = await context.init_db(retry=1)
+    await initialize_registry(db=dbdriver)
 
-    await migrate_database(db=dbdriver, initialize=True, check=check, migration_number=migration_number)
+    success = await check_inheritance(db=dbdriver, fix=fix)
+    if not success:
+        raise typer.Exit(code=1)
 
     await dbdriver.close()
 
 
-@app.command(name="check-inheritance")
-async def check_inheritance_cmd(
+@app.command(name="check-duplicate-schema-fields")
+async def check_duplicate_schema_fields_cmd(
    ctx: typer.Context,
-    fix: bool = typer.Option(False, help="Fix the inheritance of any invalid nodes."),
+    fix: bool = typer.Option(False, help="Fix the duplicate schema fields on the default branch."),
     config_file: str = typer.Argument("infrahub.toml", envvar="INFRAHUB_CONFIG"),
 ) -> None:
-    """Check the database for any vertices with incorrect inheritance"""
+    """Check for any duplicate schema attributes or relationships on the default branch"""
     logging.getLogger("infrahub").setLevel(logging.WARNING)
     logging.getLogger("neo4j").setLevel(logging.ERROR)
     logging.getLogger("prefect").setLevel(logging.ERROR)
@@ -191,9 +161,8 @@ async def check_inheritance_cmd(
 
     context: CliContext = ctx.obj
     dbdriver = await context.init_db(retry=1)
-    await initialize_registry(db=dbdriver)
 
-    success = await check_inheritance(db=dbdriver, fix=fix)
+    success = await clean_duplicate_schema_fields(db=dbdriver, fix=fix)
     if not success:
         raise typer.Exit(code=1)
 
@@ -311,8 +280,38 @@ async def index(
     await dbdriver.close()
 
 
+async def detect_migration_to_run(
+    current_graph_version: int, migration_number: int | str | None = None
+) -> Sequence[MigrationTypes]:
+    """Return a sequence of migrations to apply to upgrade the database."""
+    rprint("Checking current state of the database")
+    migrations: list[MigrationTypes] = []
+
+    if migration_number:
+        migration = get_migration_by_number(migration_number)
+        migrations.append(migration)
+        if current_graph_version > migration.minimum_version:
+            rprint(
+                f"Migration {migration_number} already applied. To apply again, run the command without the --check flag."
+            )
+            return []
+        rprint(
+            f"Migration {migration_number} needs to be applied. Run `infrahub db migrate` to apply all outstanding migrations."
+        )
+    else:
+        migrations.extend(await get_graph_migrations(current_graph_version=current_graph_version))
+        if not migrations:
+            rprint(f"Database up-to-date (v{current_graph_version}), no migration to execute.")
+            return []
+
+    rprint(
+        f"Database needs to be updated (v{current_graph_version} -> v{GRAPH_VERSION}), {len(migrations)} migrations pending"
+    )
+    return migrations
+
+
 async def migrate_database(
-    db: InfrahubDatabase, initialize: bool = False, check: bool = False, migration_number: int | str | None = None
+    db: InfrahubDatabase, migrations: Sequence[MigrationTypes], initialize: bool = False
 ) -> bool:
     """Apply the latest migrations to the database, this function will print the status directly in the console.
 
@@ -320,40 +319,16 @@
 
     Args:
        db: The database object.
-        check: If True, the function will only check the status of the database and not apply the migrations. Defaults to False.
-        migration_number: If provided, the function will only apply the migration with the given number. Defaults to None.
+        migrations: Sequence of migrations to apply.
+        initialize: Whether to initialize the registry before running migrations.
     """
-    rprint("Checking current state of the Database")
+    if not migrations:
+        return True
 
     if initialize:
         await initialize_registry(db=db)
 
     root_node = await get_root_node(db=db)
-    if migration_number:
-        migration = get_migration_by_number(migration_number)
-        migrations: Sequence[GraphMigration | InternalSchemaMigration | ArbitraryMigration] = [migration]
-        if check:
-            if root_node.graph_version > migration.minimum_version:
-                rprint(
-                    f"Migration {migration_number} already applied. To apply again, run the command without the --check flag."
-                )
-                return True
-            rprint(
-                f"Migration {migration_number} needs to be applied. Run `infrahub db migrate` to apply all outstanding migrations."
-            )
-            return False
-    else:
-        migrations = await get_graph_migrations(root=root_node)
-        if not migrations:
-            rprint(f"Database up-to-date (v{root_node.graph_version}), no migration to execute.")
-            return True
-
-        rprint(
-            f"Database needs to be updated (v{root_node.graph_version} -> v{GRAPH_VERSION}), {len(migrations)} migrations pending"
-        )
-
-        if check:
-            return True
 
     for migration in migrations:
         execution_result = await migration.execute(db=db)
@@ -378,6 +353,36 @@
     return True
 
 
+async def trigger_rebase_branches(db: InfrahubDatabase) -> None:
+    """Trigger rebase of non-default branches, also triggering migrations in the process."""
+    branches = [b for b in await Branch.get_list(db=db) if b.name not in [registry.default_branch, GLOBAL_BRANCH_NAME]]
+    if not branches:
+        return
+
+    rprint(f"Planning rebase and migrations for {len(branches)} branches: {', '.join([b.name for b in branches])}")
+
+    for branch in branches:
+        if branch.graph_version == GRAPH_VERSION:
+            rprint(
+                f"Ignoring branch rebase and migrations for '{branch.name}' (ID: {branch.uuid}), it is already up-to-date"
+            )
+            continue
+
+        rprint(f"Rebasing branch '{branch.name}' (ID: {branch.uuid})...", end="")
+        try:
+            await registry.schema.load_schema(db=db, branch=branch)
+            await rebase_branch(
+                branch=branch.name,
+                context=InfrahubContext.init(
+                    branch=branch, account=AccountSession(auth_type=AuthType.NONE, authenticated=False, account_id="")
+                ),
+                send_events=False,
+            )
+            rprint(SUCCESS_BADGE)
+        except (ValidationError, MigrationFailureError):
+            rprint(FAILED_BADGE)
+
+
 async def initialize_internal_schema() -> None:
     registry.schema = SchemaManager()
     schema = SchemaRoot(**internal_schema)
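The net effect of this refactor is that planning and execution are now separate steps: `detect_migration_to_run` computes the pending migrations from the root node's graph version, `migrate_database` only applies the sequence it is given, and `trigger_rebase_branches` propagates the result to non-default branches. A minimal sketch of driving the same flow outside the Typer command, assuming configuration is already loaded and `dbdriver` is an initialized `InfrahubDatabase` (the helper name `apply_pending_migrations` is illustrative, not part of the package):

```python
# Sketch only: mirrors the plan/apply split used by the new `infrahub db migrate` command.
from infrahub.cli.db import detect_migration_to_run, migrate_database, trigger_rebase_branches
from infrahub.core.initialization import get_root_node, initialize_registry
from infrahub.database import InfrahubDatabase


async def apply_pending_migrations(dbdriver: InfrahubDatabase) -> None:
    await initialize_registry(db=dbdriver)
    root_node = await get_root_node(db=dbdriver)

    # Plan: returns an empty sequence when the graph is already at GRAPH_VERSION
    migrations = await detect_migration_to_run(current_graph_version=root_node.graph_version)
    if not migrations:
        return

    # Apply: initialize=False because the registry was initialized above
    await migrate_database(db=dbdriver, migrations=migrations, initialize=False)

    # Rebase non-default branches so they pick up the new graph version as well
    await trigger_rebase_branches(db=dbdriver)
```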
infrahub/cli/db_commands/clean_duplicate_schema_fields.py ADDED
@@ -0,0 +1,212 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+from rich import print as rprint
+from rich.console import Console
+from rich.table import Table
+
+from infrahub.cli.constants import FAILED_BADGE, SUCCESS_BADGE
+from infrahub.core.query import Query, QueryType
+from infrahub.database import InfrahubDatabase
+
+
+class SchemaFieldType(str, Enum):
+    ATTRIBUTE = "attribute"
+    RELATIONSHIP = "relationship"
+
+
+@dataclass
+class SchemaFieldDetails:
+    schema_kind: str
+    schema_uuid: str
+    field_type: SchemaFieldType
+    field_name: str
+
+
+class DuplicateSchemaFields(Query):
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:  # noqa: ARG002
+        query = """
+        MATCH (root:Root)
+        LIMIT 1
+        WITH root.default_branch AS default_branch
+        MATCH (field:SchemaAttribute|SchemaRelationship)
+        CALL (default_branch, field) {
+            MATCH (field)-[is_part_of:IS_PART_OF]->(:Root)
+            WHERE is_part_of.branch = default_branch
+            ORDER BY is_part_of.from DESC
+            RETURN is_part_of
+            LIMIT 1
+        }
+        WITH default_branch, field, CASE
+            WHEN is_part_of.status = "active" AND is_part_of.to IS NULL THEN is_part_of.from
+            ELSE NULL
+        END AS active_from
+        WHERE active_from IS NOT NULL
+        WITH default_branch, field, active_from, "SchemaAttribute" IN labels(field) AS is_attribute
+        CALL (field, default_branch) {
+            MATCH (field)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
+            WHERE r1.branch = default_branch AND r2.branch = default_branch
+            AND r1.status = "active" AND r2.status = "active"
+            AND r1.to IS NULL AND r2.to IS NULL
+            ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
+            LIMIT 1
+            RETURN name_value.value AS field_name
+        }
+        CALL (field, default_branch) {
+            MATCH (field)-[r1:IS_RELATED]-(rel:Relationship)-[r2:IS_RELATED]-(peer:SchemaNode|SchemaGeneric)
+            WHERE rel.name IN ["schema__node__relationships", "schema__node__attributes"]
+            AND r1.branch = default_branch AND r2.branch = default_branch
+            AND r1.status = "active" AND r2.status = "active"
+            AND r1.to IS NULL AND r2.to IS NULL
+            ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
+            LIMIT 1
+            RETURN peer AS schema_vertex
+        }
+        WITH default_branch, field, field_name, is_attribute, active_from, schema_vertex
+        ORDER BY active_from DESC
+        WITH default_branch, field_name, is_attribute, schema_vertex, collect(field) AS fields_reverse_chron
+        WHERE size(fields_reverse_chron) > 1
+        """
+        self.add_to_query(query)
+
+
+class GetDuplicateSchemaFields(DuplicateSchemaFields):
+    """
+    Get the kind, field type, and field name for any duplicated attributes or relationships on a given schema
+    on the default branch
+    """
+
+    name = "get_duplicate_schema_fields"
+    type = QueryType.READ
+    insert_return = False
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
+        await super().query_init(db=db, **kwargs)
+        query = """
+        CALL (schema_vertex, default_branch) {
+            MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "namespace"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
+            WHERE r1.branch = default_branch AND r2.branch = default_branch
+            ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
+            LIMIT 1
+            RETURN name_value.value AS schema_namespace
+        }
+        CALL (schema_vertex, default_branch) {
+            MATCH (schema_vertex)-[r1:HAS_ATTRIBUTE]->(:Attribute {name: "name"})-[r2:HAS_VALUE]->(name_value:AttributeValue)
+            WHERE r1.branch = default_branch AND r2.branch = default_branch
+            ORDER BY r1.from DESC, r1.status ASC, r2.from DESC, r2.status ASC
+            LIMIT 1
+            RETURN name_value.value AS schema_name
+        }
+        RETURN schema_namespace + schema_name AS schema_kind, schema_vertex.uuid AS schema_uuid, field_name, is_attribute
+        ORDER BY schema_kind ASC, is_attribute DESC, field_name ASC
+        """
+        self.return_labels = ["schema_kind", "schema_uuid", "field_name", "is_attribute"]
+        self.add_to_query(query)
+
+    def get_schema_field_details(self) -> list[SchemaFieldDetails]:
+        schema_field_details: list[SchemaFieldDetails] = []
+        for result in self.results:
+            schema_kind = result.get_as_type(label="schema_kind", return_type=str)
+            schema_uuid = result.get_as_type(label="schema_uuid", return_type=str)
+            field_name = result.get_as_type(label="field_name", return_type=str)
+            is_attribute = result.get_as_type(label="is_attribute", return_type=bool)
+            schema_field_details.append(
+                SchemaFieldDetails(
+                    schema_kind=schema_kind,
+                    schema_uuid=schema_uuid,
+                    field_name=field_name,
+                    field_type=SchemaFieldType.ATTRIBUTE if is_attribute else SchemaFieldType.RELATIONSHIP,
+                )
+            )
+        return schema_field_details
+
+
+class FixDuplicateSchemaFields(DuplicateSchemaFields):
+    """
+    Fix the duplicate schema fields by hard deleting the earlier duplicate(s)
+    """
+
+    name = "fix_duplicate_schema_fields"
+    type = QueryType.WRITE
+    insert_return = False
+
+    async def query_init(self, db: InfrahubDatabase, **kwargs: dict[str, Any]) -> None:
+        await super().query_init(db=db, **kwargs)
+        query = """
+        WITH default_branch, tail(fields_reverse_chron) AS fields_to_delete
+        UNWIND fields_to_delete AS field_to_delete
+        CALL (field_to_delete, default_branch) {
+            MATCH (field_to_delete)-[r:IS_PART_OF {branch: default_branch}]-()
+            DELETE r
+            WITH field_to_delete
+            MATCH (field_to_delete)-[:IS_RELATED {branch: default_branch}]-(rel:Relationship)
+            WITH DISTINCT field_to_delete, rel
+            MATCH (rel)-[r {branch: default_branch}]-()
+            DELETE r
+            WITH field_to_delete, rel
+            OPTIONAL MATCH (rel)
+            WHERE NOT exists((rel)--())
+            DELETE rel
+            WITH DISTINCT field_to_delete
+            MATCH (field_to_delete)-[:HAS_ATTRIBUTE {branch: default_branch}]->(attr:Attribute)
+            MATCH (attr)-[r {branch: default_branch}]-()
+            DELETE r
+            WITH field_to_delete, attr
+            OPTIONAL MATCH (attr)
+            WHERE NOT exists((attr)--())
+            DELETE attr
+            WITH DISTINCT field_to_delete
+            OPTIONAL MATCH (field_to_delete)
+            WHERE NOT exists((field_to_delete)--())
+            DELETE field_to_delete
+        }
+        """
+        self.add_to_query(query)
+
+
+def display_duplicate_schema_fields(duplicate_schema_fields: list[SchemaFieldDetails]) -> None:
+    console = Console()
+
+    table = Table(title="Duplicate Schema Fields on Default Branch")
+
+    table.add_column("Schema Kind")
+    table.add_column("Schema UUID")
+    table.add_column("Field Name")
+    table.add_column("Field Type")
+
+    for duplicate_schema_field in duplicate_schema_fields:
+        table.add_row(
+            duplicate_schema_field.schema_kind,
+            duplicate_schema_field.schema_uuid,
+            duplicate_schema_field.field_name,
+            duplicate_schema_field.field_type.value,
+        )
+
+    console.print(table)
+
+
+async def clean_duplicate_schema_fields(db: InfrahubDatabase, fix: bool = False) -> bool:
+    """
+    Identify any attributes or relationships that are duplicated in a schema on the default branch
+    If fix is True, runs cypher queries to hard delete the earlier duplicate
+    """
+
+    duplicate_schema_fields_query = await GetDuplicateSchemaFields.init(db=db)
+    await duplicate_schema_fields_query.execute(db=db)
+    duplicate_schema_fields = duplicate_schema_fields_query.get_schema_field_details()
+
+    if not duplicate_schema_fields:
+        rprint(f"{SUCCESS_BADGE} No duplicate schema fields found")
+        return True
+
+    display_duplicate_schema_fields(duplicate_schema_fields)
+
+    if not fix:
+        rprint(f"{FAILED_BADGE} Use the --fix flag to fix the duplicate schema fields")
+        return False
+
+    fix_duplicate_schema_fields_query = await FixDuplicateSchemaFields.init(db=db)
+    await fix_duplicate_schema_fields_query.execute(db=db)
+    rprint(f"{SUCCESS_BADGE} Duplicate schema fields deleted from the default branch")
+    return True
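The new module is exposed through the `check-duplicate-schema-fields` command shown in the db.py diff above, but the entry point can also be called directly. A minimal sketch, assuming an already-initialized `InfrahubDatabase` handle (the wrapper name `dedupe_default_branch_schema` is illustrative, not part of the package):

```python
# Sketch only: report duplicates first, then delete them in a second pass.
from infrahub.cli.db_commands.clean_duplicate_schema_fields import clean_duplicate_schema_fields
from infrahub.database import InfrahubDatabase


async def dedupe_default_branch_schema(db: InfrahubDatabase) -> None:
    # First pass: fix=False only prints the table of duplicates and returns False if any were found
    is_clean = await clean_duplicate_schema_fields(db=db, fix=False)
    if not is_clean:
        # Second pass: fix=True hard-deletes the older duplicate vertices on the default branch
        await clean_duplicate_schema_fields(db=db, fix=True)
```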
infrahub/cli/dev.py ADDED
@@ -0,0 +1,118 @@
+from __future__ import annotations
+
+import importlib
+import logging
+from pathlib import Path  # noqa: TC003
+from typing import TYPE_CHECKING
+
+import typer
+from graphql import parse, print_ast, print_schema
+from infrahub_sdk.async_typer import AsyncTyper
+from rich.logging import RichHandler
+
+from infrahub import config
+from infrahub.core.initialization import (
+    first_time_initialization,
+    initialization,
+)
+from infrahub.core.schema import SchemaRoot, core_models, internal_schema
+from infrahub.core.schema.schema_branch import SchemaBranch
+from infrahub.core.utils import delete_all_nodes
+from infrahub.graphql.manager import GraphQLSchemaManager
+from infrahub.graphql.schema_sort import sort_schema_ast
+from infrahub.log import get_logger
+
+if TYPE_CHECKING:
+    from infrahub.cli.context import CliContext
+
+app = AsyncTyper()
+
+
+@app.command(name="export-graphql-schema")
+async def export_graphql_schema(
+    ctx: typer.Context,  # noqa: ARG001
+    config_file: str = typer.Option("infrahub.toml", envvar="INFRAHUB_CONFIG"),
+    out: Path = typer.Option("schema.graphql"),  # noqa: B008
+) -> None:
+    """Export the Core GraphQL schema to a file."""
+
+    config.load_and_exit(config_file_name=config_file)
+
+    schema = SchemaRoot(**internal_schema)
+    full_schema = schema.merge(schema=SchemaRoot(**core_models))
+
+    schema_branch = SchemaBranch(cache={}, name="default")
+    schema_branch.load_schema(schema=full_schema)
+
+    schema_branch.process()
+
+    gqlm = GraphQLSchemaManager(schema=schema_branch)
+    gql_schema = gqlm.generate()
+
+    schema_str = print_schema(gql_schema)
+    schema_ast = parse(schema_str)
+    sorted_schema_ast = sort_schema_ast(schema_ast)
+    sorted_schema_str = print_ast(sorted_schema_ast)
+
+    out.write_text(sorted_schema_str)
+
+
+@app.command(name="db-init")
+async def database_init(
+    ctx: typer.Context,
+    config_file: str = typer.Option(
+        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
+    ),
+) -> None:
+    """Erase the content of the database and initialize it with the core schema."""
+
+    log = get_logger()
+
+    # --------------------------------------------------
+    # CLEANUP
+    # - For now we delete everything in the database
+    # TODO, if possible try to implement this in an idempotent way
+    # --------------------------------------------------
+
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    config.load_and_exit(config_file_name=config_file)
+
+    context: CliContext = ctx.obj
+    dbdriver = await context.init_db(retry=1)
+    async with dbdriver.start_transaction() as db:
+        log.info("Delete All Nodes")
+        await delete_all_nodes(db=db)
+        await first_time_initialization(db=db)
+
+    await dbdriver.close()
+
+
+@app.command(name="load-test-data")
+async def load_test_data(
+    ctx: typer.Context,
+    config_file: str = typer.Option(
+        "infrahub.toml", envvar="INFRAHUB_CONFIG", help="Location of the configuration file to use for Infrahub"
+    ),
+    dataset: str = "dataset01",
+) -> None:
+    """Load test data into the database from the `test_data` directory."""
+
+    logging.getLogger("neo4j").setLevel(logging.ERROR)
+    config.load_and_exit(config_file_name=config_file)
+
+    context: CliContext = ctx.obj
+    dbdriver = await context.init_db(retry=1)
+
+    async with dbdriver.start_session() as db:
+        await initialization(db=db)
+
+        log_level = "DEBUG"
+
+        FORMAT = "%(message)s"
+        logging.basicConfig(level=log_level, format=FORMAT, datefmt="[%X]", handlers=[RichHandler()])
+        logging.getLogger("infrahub")
+
+        dataset_module = importlib.import_module(f"infrahub.test_data.{dataset}")
+        await dataset_module.load_data(db=db)
+
+    await dbdriver.close()
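The `init` and `load-test-data` commands previously lived under `infrahub db`; they now sit in a dedicated dev CLI group alongside the new GraphQL schema export. A minimal sketch of the export step on its own, assuming configuration has already been loaded (the CLI command calls `config.load_and_exit` first); the helper name `render_core_graphql_schema` is illustrative:

```python
# Sketch only: the core of export_graphql_schema without the Typer wrapper or file output.
from graphql import parse, print_ast, print_schema

from infrahub.core.schema import SchemaRoot, core_models, internal_schema
from infrahub.core.schema.schema_branch import SchemaBranch
from infrahub.graphql.manager import GraphQLSchemaManager
from infrahub.graphql.schema_sort import sort_schema_ast


def render_core_graphql_schema() -> str:
    # Build the internal + core schema in memory, without touching the database
    full_schema = SchemaRoot(**internal_schema).merge(schema=SchemaRoot(**core_models))
    schema_branch = SchemaBranch(cache={}, name="default")
    schema_branch.load_schema(schema=full_schema)
    schema_branch.process()

    # Generate the GraphQL schema and sort the printed AST so the output is stable across runs
    gql_schema = GraphQLSchemaManager(schema=schema_branch).generate()
    return print_ast(sort_schema_ast(parse(print_schema(gql_schema))))
```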