infrahub-server 1.3.7-py3-none-any.whl → 1.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (174)
  1. infrahub/api/internal.py +5 -0
  2. infrahub/artifacts/tasks.py +17 -22
  3. infrahub/branch/merge_mutation_checker.py +38 -0
  4. infrahub/cli/__init__.py +2 -2
  5. infrahub/cli/context.py +7 -3
  6. infrahub/cli/db.py +5 -16
  7. infrahub/cli/upgrade.py +10 -29
  8. infrahub/computed_attribute/tasks.py +36 -46
  9. infrahub/config.py +57 -6
  10. infrahub/constants/environment.py +1 -0
  11. infrahub/core/attribute.py +15 -7
  12. infrahub/core/branch/tasks.py +43 -41
  13. infrahub/core/constants/__init__.py +21 -6
  14. infrahub/core/constants/infrahubkind.py +2 -0
  15. infrahub/core/diff/coordinator.py +3 -1
  16. infrahub/core/diff/model/path.py +0 -39
  17. infrahub/core/diff/repository/repository.py +0 -8
  18. infrahub/core/diff/tasks.py +11 -8
  19. infrahub/core/graph/__init__.py +1 -1
  20. infrahub/core/graph/index.py +1 -2
  21. infrahub/core/graph/schema.py +50 -29
  22. infrahub/core/initialization.py +81 -47
  23. infrahub/core/ipam/tasks.py +4 -3
  24. infrahub/core/merge.py +8 -10
  25. infrahub/core/migrations/__init__.py +2 -0
  26. infrahub/core/migrations/graph/__init__.py +4 -0
  27. infrahub/core/migrations/graph/m036_drop_attr_value_index.py +45 -0
  28. infrahub/core/migrations/graph/m037_index_attr_vals.py +577 -0
  29. infrahub/core/migrations/query/attribute_add.py +27 -2
  30. infrahub/core/migrations/schema/attribute_kind_update.py +156 -0
  31. infrahub/core/migrations/schema/tasks.py +6 -5
  32. infrahub/core/models.py +5 -1
  33. infrahub/core/node/proposed_change.py +43 -0
  34. infrahub/core/protocols.py +12 -0
  35. infrahub/core/query/attribute.py +32 -14
  36. infrahub/core/query/diff.py +11 -0
  37. infrahub/core/query/ipam.py +13 -7
  38. infrahub/core/query/node.py +51 -10
  39. infrahub/core/query/resource_manager.py +3 -3
  40. infrahub/core/schema/basenode_schema.py +8 -0
  41. infrahub/core/schema/definitions/core/__init__.py +10 -1
  42. infrahub/core/schema/definitions/core/ipam.py +28 -2
  43. infrahub/core/schema/definitions/core/propose_change.py +15 -0
  44. infrahub/core/schema/definitions/core/webhook.py +3 -0
  45. infrahub/core/schema/definitions/internal.py +1 -1
  46. infrahub/core/schema/generated/attribute_schema.py +1 -1
  47. infrahub/core/schema/generic_schema.py +10 -0
  48. infrahub/core/schema/manager.py +10 -1
  49. infrahub/core/schema/node_schema.py +22 -22
  50. infrahub/core/schema/profile_schema.py +8 -0
  51. infrahub/core/schema/schema_branch.py +11 -7
  52. infrahub/core/schema/template_schema.py +8 -0
  53. infrahub/core/validators/attribute/kind.py +5 -1
  54. infrahub/core/validators/checks_runner.py +5 -5
  55. infrahub/core/validators/determiner.py +22 -2
  56. infrahub/core/validators/tasks.py +6 -7
  57. infrahub/core/validators/uniqueness/checker.py +4 -2
  58. infrahub/core/validators/uniqueness/model.py +1 -0
  59. infrahub/core/validators/uniqueness/query.py +57 -7
  60. infrahub/database/__init__.py +2 -1
  61. infrahub/events/__init__.py +20 -0
  62. infrahub/events/constants.py +7 -0
  63. infrahub/events/generator.py +29 -2
  64. infrahub/events/proposed_change_action.py +203 -0
  65. infrahub/generators/tasks.py +24 -20
  66. infrahub/git/base.py +4 -7
  67. infrahub/git/integrator.py +21 -12
  68. infrahub/git/repository.py +15 -30
  69. infrahub/git/tasks.py +121 -106
  70. infrahub/graphql/app.py +2 -1
  71. infrahub/graphql/field_extractor.py +69 -0
  72. infrahub/graphql/manager.py +15 -11
  73. infrahub/graphql/mutations/account.py +2 -2
  74. infrahub/graphql/mutations/action.py +8 -2
  75. infrahub/graphql/mutations/artifact_definition.py +4 -1
  76. infrahub/graphql/mutations/branch.py +10 -5
  77. infrahub/graphql/mutations/graphql_query.py +2 -1
  78. infrahub/graphql/mutations/main.py +14 -8
  79. infrahub/graphql/mutations/menu.py +2 -1
  80. infrahub/graphql/mutations/proposed_change.py +230 -8
  81. infrahub/graphql/mutations/relationship.py +5 -0
  82. infrahub/graphql/mutations/repository.py +2 -1
  83. infrahub/graphql/mutations/tasks.py +7 -9
  84. infrahub/graphql/mutations/webhook.py +4 -1
  85. infrahub/graphql/parser.py +15 -6
  86. infrahub/graphql/queries/__init__.py +10 -1
  87. infrahub/graphql/queries/account.py +3 -3
  88. infrahub/graphql/queries/branch.py +2 -2
  89. infrahub/graphql/queries/diff/tree.py +56 -5
  90. infrahub/graphql/queries/event.py +13 -3
  91. infrahub/graphql/queries/ipam.py +23 -1
  92. infrahub/graphql/queries/proposed_change.py +84 -0
  93. infrahub/graphql/queries/relationship.py +2 -2
  94. infrahub/graphql/queries/resource_manager.py +3 -3
  95. infrahub/graphql/queries/search.py +3 -2
  96. infrahub/graphql/queries/status.py +3 -2
  97. infrahub/graphql/queries/task.py +2 -2
  98. infrahub/graphql/resolvers/ipam.py +440 -0
  99. infrahub/graphql/resolvers/many_relationship.py +4 -3
  100. infrahub/graphql/resolvers/resolver.py +5 -5
  101. infrahub/graphql/resolvers/single_relationship.py +3 -2
  102. infrahub/graphql/schema.py +25 -5
  103. infrahub/graphql/types/__init__.py +2 -2
  104. infrahub/graphql/types/attribute.py +3 -3
  105. infrahub/graphql/types/event.py +68 -0
  106. infrahub/groups/tasks.py +6 -6
  107. infrahub/lock.py +3 -2
  108. infrahub/menu/generator.py +8 -0
  109. infrahub/message_bus/operations/__init__.py +9 -12
  110. infrahub/message_bus/operations/git/file.py +6 -5
  111. infrahub/message_bus/operations/git/repository.py +12 -20
  112. infrahub/message_bus/operations/refresh/registry.py +15 -9
  113. infrahub/message_bus/operations/send/echo.py +7 -4
  114. infrahub/message_bus/types.py +1 -0
  115. infrahub/permissions/__init__.py +2 -1
  116. infrahub/permissions/constants.py +13 -0
  117. infrahub/permissions/globals.py +31 -2
  118. infrahub/permissions/manager.py +8 -5
  119. infrahub/pools/prefix.py +7 -5
  120. infrahub/prefect_server/app.py +31 -0
  121. infrahub/prefect_server/bootstrap.py +18 -0
  122. infrahub/proposed_change/action_checker.py +206 -0
  123. infrahub/proposed_change/approval_revoker.py +40 -0
  124. infrahub/proposed_change/branch_diff.py +3 -1
  125. infrahub/proposed_change/checker.py +45 -0
  126. infrahub/proposed_change/constants.py +32 -2
  127. infrahub/proposed_change/tasks.py +182 -150
  128. infrahub/py.typed +0 -0
  129. infrahub/server.py +29 -17
  130. infrahub/services/__init__.py +13 -28
  131. infrahub/services/adapters/cache/__init__.py +4 -0
  132. infrahub/services/adapters/cache/nats.py +2 -0
  133. infrahub/services/adapters/cache/redis.py +3 -0
  134. infrahub/services/adapters/message_bus/__init__.py +0 -2
  135. infrahub/services/adapters/message_bus/local.py +1 -2
  136. infrahub/services/adapters/message_bus/nats.py +6 -8
  137. infrahub/services/adapters/message_bus/rabbitmq.py +7 -9
  138. infrahub/services/adapters/workflow/__init__.py +1 -0
  139. infrahub/services/adapters/workflow/local.py +1 -8
  140. infrahub/services/component.py +2 -1
  141. infrahub/task_manager/event.py +56 -0
  142. infrahub/task_manager/models.py +9 -0
  143. infrahub/tasks/artifact.py +6 -7
  144. infrahub/tasks/check.py +4 -7
  145. infrahub/telemetry/tasks.py +15 -18
  146. infrahub/transformations/tasks.py +10 -6
  147. infrahub/trigger/tasks.py +4 -3
  148. infrahub/types.py +4 -0
  149. infrahub/validators/events.py +7 -7
  150. infrahub/validators/tasks.py +6 -7
  151. infrahub/webhook/models.py +45 -45
  152. infrahub/webhook/tasks.py +25 -24
  153. infrahub/workers/dependencies.py +143 -0
  154. infrahub/workers/infrahub_async.py +19 -43
  155. infrahub/workflows/catalogue.py +16 -2
  156. infrahub/workflows/initialization.py +5 -4
  157. infrahub/workflows/models.py +2 -0
  158. infrahub_sdk/client.py +2 -2
  159. infrahub_sdk/ctl/repository.py +51 -0
  160. infrahub_sdk/ctl/schema.py +9 -9
  161. infrahub_sdk/node/node.py +2 -2
  162. infrahub_sdk/pytest_plugin/items/graphql_query.py +1 -1
  163. infrahub_sdk/schema/repository.py +1 -1
  164. infrahub_sdk/testing/docker.py +1 -1
  165. infrahub_sdk/utils.py +2 -2
  166. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/METADATA +7 -5
  167. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/RECORD +174 -158
  168. infrahub_testcontainers/container.py +17 -0
  169. infrahub_testcontainers/docker-compose-cluster.test.yml +56 -1
  170. infrahub_testcontainers/docker-compose.test.yml +56 -1
  171. infrahub_testcontainers/helpers.py +4 -1
  172. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/LICENSE.txt +0 -0
  173. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/WHEEL +0 -0
  174. {infrahub_server-1.3.7.dist-info → infrahub_server-1.4.0.dist-info}/entry_points.txt +0 -0
infrahub/graphql/types/event.py CHANGED
@@ -113,6 +113,65 @@ class BranchDeletedEvent(ObjectType):
     payload = Field(GenericScalar, required=True)
 
 
+# ---------------------------------------
+# Proposed change events
+# ---------------------------------------
+class ProposedChangeReviewEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    reviewer_account_id = String(required=True, description="The ID of the user who reviewed the proposed change")
+    reviewer_account_name = String(required=True, description="The name of the user who reviewed the proposed change")
+    reviewer_decision = String(required=True, description="The decision made by the reviewer")
+    payload = Field(GenericScalar, required=True)
+
+
+class ProposedChangeReviewRevokedEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    reviewer_account_id = String(required=True, description="The ID of the user who reviewed the proposed change")
+    reviewer_account_name = String(required=True, description="The name of the user who reviewed the proposed change")
+    reviewer_former_decision = String(required=True, description="The decision made by the reviewer")
+    payload = Field(GenericScalar, required=True)
+
+
+class ProposedChangeApprovalsRevokedEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    payload = Field(GenericScalar, required=True)
+
+
+class ProposedChangeReviewRequestedEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    requested_by_account_id = String(
+        required=True, description="The ID of the user who requested the proposed change to be reviewed"
+    )
+    requested_by_account_name = String(
+        required=True, description="The name of the user who requested the proposed change to be reviewed"
+    )
+    payload = Field(GenericScalar, required=True)
+
+
+class ProposedChangeMergedEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    merged_by_account_id = String(required=True, description="The ID of the user who merged the proposed change")
+    merged_by_account_name = String(required=True, description="The name of the user who merged the proposed change")
+    payload = Field(GenericScalar, required=True)
+
+
+class ProposedChangeThreadEvent(ObjectType):
+    class Meta:
+        interfaces = (EventNodeInterface,)
+
+    payload = Field(GenericScalar, required=True)
+
+
 # ---------------------------------------
 # Node/Object events
 # ---------------------------------------
@@ -163,5 +222,14 @@ EVENT_TYPES: dict[str, type[ObjectType]] = {
     events.BranchDeletedEvent.event_name: BranchDeletedEvent,
     events.GroupMemberAddedEvent.event_name: GroupEvent,
     events.GroupMemberRemovedEvent.event_name: GroupEvent,
+    events.ProposedChangeApprovedEvent.event_name: ProposedChangeReviewEvent,
+    events.ProposedChangeApprovalRevokedEvent.event_name: ProposedChangeReviewRevokedEvent,
+    events.ProposedChangeRejectedEvent.event_name: ProposedChangeReviewEvent,
+    events.ProposedChangeRejectionRevokedEvent.event_name: ProposedChangeReviewRevokedEvent,
+    events.ProposedChangeReviewRequestedEvent.event_name: ProposedChangeReviewRequestedEvent,
+    events.ProposedChangeApprovalsRevokedEvent.event_name: ProposedChangeApprovalsRevokedEvent,
+    events.ProposedChangeMergedEvent.event_name: ProposedChangeMergedEvent,
+    events.ProposedChangeThreadCreatedEvent.event_name: ProposedChangeThreadEvent,
+    events.ProposedChangeThreadUpdatedEvent.event_name: ProposedChangeThreadEvent,
     "undefined": StandardEvent,
 }
infrahub/groups/tasks.py CHANGED
@@ -4,14 +4,16 @@ from prefect import flow
 
 from infrahub.core.constants import InfrahubKind
 from infrahub.groups.models import RequestGraphQLQueryGroupUpdate
-from infrahub.services import InfrahubServices
+from infrahub.workers.dependencies import get_client
 from infrahub.workflows.utils import add_tags
 
 
 @flow(name="graphql-query-group-update", flow_run_name="Update GraphQLQuery Group '{model.query_name}'")
-async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate, service: InfrahubServices) -> None:
+async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate) -> None:
     """Create or Update a GraphQLQueryGroup."""
 
+    client = get_client()
+
     # If there is only one subscriber, associate the task to it
     # If there are more than one, for now we can't associate all of them
     related_nodes = []
@@ -23,7 +25,7 @@ async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate, serv
     params_hash = dict_hash(model.params)
     group_name = f"{model.query_name}__{params_hash}"
     group_label = f"Query {model.query_name} Hash({params_hash[:8]})"
-    group = await service.client.create(
+    group = await client.create(
         kind=InfrahubKind.GRAPHQLQUERYGROUP,
         branch=model.branch,
         name=group_name,
@@ -36,6 +38,4 @@ async def update_graphql_query_group(model: RequestGraphQLQueryGroupUpdate, serv
     await group.save(allow_upsert=True)
 
     if model.subscribers:
-        await group_add_subscriber(
-            client=service.client, group=group, subscribers=model.subscribers, branch=model.branch
-        )
+        await group_add_subscriber(client=client, group=group, subscribers=model.subscribers, branch=model.branch)
infrahub/lock.py CHANGED
@@ -12,6 +12,7 @@ from prometheus_client import Histogram
 from redis.asyncio.lock import Lock as GlobalLock
 
 from infrahub import config
+from infrahub.core.timestamp import current_timestamp
 
 if TYPE_CHECKING:
     from types import TracebackType
@@ -91,7 +92,7 @@ class NATSLock:
         await self.release()
 
     async def acquire(self) -> None:
-        token = uuid.uuid1().hex
+        token = current_timestamp()
         while True:
             if await self.do_acquire(token):
                 self.token = token
@@ -155,7 +156,7 @@ class InfrahubLock:
     async def acquire(self) -> None:
         with LOCK_ACQUIRE_TIME_METRICS.labels(self.name, self.lock_type).time():
             if not self.use_local:
-                await self.remote.acquire()
+                await self.remote.acquire(token=current_timestamp())
             else:
                 await self.local.acquire()
             self.acquire_time = time.time_ns()
infrahub/menu/generator.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import TYPE_CHECKING
 
 from infrahub.core import registry
+from infrahub.core.constants import InfrahubKind
 from infrahub.core.protocols import CoreMenuItem
 from infrahub.log import get_logger
 
@@ -133,4 +134,11 @@ async def generate_menu(db: InfrahubDatabase, branch: Branch, menu_items: list[C
             default_menu.children[str(menu_item.identifier)] = menu_item
             items_to_add[item_name] = True
 
+    builtin_ipaddress = registry.schema.get_generic_schema(name=InfrahubKind.IPADDRESS, branch=branch, duplicate=False)
+    builtin_ipprefix = registry.schema.get_generic_schema(name=InfrahubKind.IPPREFIX, branch=branch, duplicate=False)
+    ipam_missing = len(builtin_ipaddress.used_by + builtin_ipprefix.used_by) == 0
+
+    if ipam_missing:
+        structure.data.pop("BuiltinIPAM")
+
     return structure
infrahub/message_bus/operations/__init__.py CHANGED
@@ -1,14 +1,11 @@
 import ujson
 from prefect import Flow
 
+from infrahub.log import get_logger
 from infrahub.message_bus import RPCErrorResponse, messages
-from infrahub.message_bus.operations import (
-    git,
-    refresh,
-    send,
-)
+from infrahub.message_bus.operations import git, refresh, send
 from infrahub.message_bus.types import MessageTTL
-from infrahub.services import InfrahubServices
+from infrahub.services.adapters.message_bus import InfrahubMessageBus
 from infrahub.tasks.check import set_check_status
 
 COMMAND_MAP = {
@@ -22,7 +19,7 @@ COMMAND_MAP = {
 
 
 async def execute_message(
-    routing_key: str, message_body: bytes, service: InfrahubServices, skip_flow: bool = False
+    routing_key: str, message_body: bytes, message_bus: InfrahubMessageBus, skip_flow: bool = False
 ) -> MessageTTL | None:
     message_data = ujson.loads(message_body)
     message = messages.MESSAGE_MAP[routing_key](**message_data)
@@ -31,16 +28,16 @@ async def execute_message(
         func = COMMAND_MAP[routing_key]
         if skip_flow and isinstance(func, Flow):
             func = func.fn
-        await func(message=message, service=service)
+        await func(message=message)
     except Exception as exc:
         if message.reply_requested:
             response = RPCErrorResponse(errors=[str(exc)], initial_message=message.model_dump())
-            await service.message_bus.reply_if_initiator_meta(message=response, initiator=message)
+            await message_bus.reply_if_initiator_meta(message=response, initiator=message)
             return None
         if message.reached_max_retries:
-            service.log.exception("Message failed after maximum number of retries", error=exc)
-            await set_check_status(message, conclusion="failure", service=service)
+            get_logger().exception("Message failed after maximum number of retries", error=exc)
+            await set_check_status(message, conclusion="failure")
            return None
        message.increase_retry_count()
-        await service.message_bus.send(message, delay=MessageTTL.FIVE, is_retry=True)
+        await message_bus.send(message, delay=MessageTTL.FIVE, is_retry=True)
        return MessageTTL.FIVE
infrahub/message_bus/operations/git/file.py CHANGED
@@ -6,29 +6,30 @@ from infrahub.message_bus.messages.git_file_get import (
     GitFileGetResponse,
     GitFileGetResponseData,
 )
-from infrahub.services import InfrahubServices
+from infrahub.workers.dependencies import get_client, get_message_bus
 
 log = get_logger()
 
 
-async def get(message: messages.GitFileGet, service: InfrahubServices) -> None:
+async def get(message: messages.GitFileGet) -> None:
     log.info("Collecting file from repository", repository=message.repository_name, file=message.file)
 
     repo = await get_initialized_repo(
+        client=get_client(),
         repository_id=message.repository_id,
         name=message.repository_name,
-        service=service,
         repository_kind=message.repository_kind,
         commit=message.commit,
     )
 
+    message_bus = await get_message_bus()
     try:
         content = await repo.get_file(commit=message.commit, location=message.file)
     except (FileOutOfRepositoryError, RepositoryFileNotFoundError) as e:
         if message.reply_requested:
             response = GitFileGetResponse(data=GitFileGetResponseData(error_message=e.message, http_code=e.HTTP_CODE))
-            await service.message_bus.reply_if_initiator_meta(message=response, initiator=message)
+            await message_bus.reply_if_initiator_meta(message=response, initiator=message)
     else:
         if message.reply_requested:
             response = GitFileGetResponse(data=GitFileGetResponseData(content=content))
-            await service.message_bus.reply_if_initiator_meta(message=response, initiator=message)
+            await message_bus.reply_if_initiator_meta(message=response, initiator=message)
infrahub/message_bus/operations/git/repository.py CHANGED
@@ -1,21 +1,21 @@
 from prefect import flow
 
 from infrahub.exceptions import RepositoryError
-from infrahub.git.repository import InfrahubRepository, get_initialized_repo, initialize_repo
+from infrahub.git.repository import InfrahubRepository, get_initialized_repo
 from infrahub.log import get_logger
 from infrahub.message_bus import messages
 from infrahub.message_bus.messages.git_repository_connectivity import (
     GitRepositoryConnectivityResponse,
     GitRepositoryConnectivityResponseData,
 )
-from infrahub.services import InfrahubServices
 from infrahub.worker import WORKER_IDENTITY
+from infrahub.workers.dependencies import get_client, get_message_bus
 
 log = get_logger()
 
 
 @flow(name="git-repository-check-connectivity", flow_run_name="Check connectivity for {message.repository_name}")
-async def connectivity(message: messages.GitRepositoryConnectivity, service: InfrahubServices) -> None:
+async def connectivity(message: messages.GitRepositoryConnectivity) -> None:
     response_data = GitRepositoryConnectivityResponseData(message="Successfully accessed repository", success=True)
 
     try:
@@ -28,30 +28,22 @@ async def connectivity(message: messages.GitRepositoryConnectivity, service: Inf
     response = GitRepositoryConnectivityResponse(
         data=response_data,
     )
-    await service.message_bus.reply_if_initiator_meta(message=response, initiator=message)
+    message_bus = await get_message_bus()
+    await message_bus.reply_if_initiator_meta(message=response, initiator=message)
 
 
 @flow(name="refresh-git-fetch", flow_run_name="Fetch git repository {message.repository_name} on " + WORKER_IDENTITY)
-async def fetch(message: messages.RefreshGitFetch, service: InfrahubServices) -> None:
+async def fetch(message: messages.RefreshGitFetch) -> None:
     if message.meta and message.meta.initiator_id == WORKER_IDENTITY:
         log.info("Ignoring git fetch request originating from self", worker=WORKER_IDENTITY)
         return
 
-    try:
-        repo = await get_initialized_repo(
-            repository_id=message.repository_id,
-            name=message.repository_name,
-            service=service,
-            repository_kind=message.repository_kind,
-        )
-    except RepositoryError:
-        repo = await initialize_repo(
-            location=message.location,
-            repository_id=message.repository_id,
-            name=message.repository_name,
-            service=service,
-            repository_kind=message.repository_kind,
-        )
+    repo = await get_initialized_repo(
+        client=get_client(),
+        repository_id=message.repository_id,
+        name=message.repository_name,
+        repository_kind=message.repository_kind,
+    )
 
     await repo.fetch()
     await repo.pull(
infrahub/message_bus/operations/refresh/registry.py CHANGED
@@ -1,28 +1,34 @@
+from infrahub.log import get_logger
 from infrahub.message_bus import messages
-from infrahub.services import InfrahubServices
 from infrahub.tasks.registry import refresh_branches
 from infrahub.worker import WORKER_IDENTITY
+from infrahub.workers.dependencies import get_component, get_database
 
 
-async def branches(message: messages.RefreshRegistryBranches, service: InfrahubServices) -> None:
+async def branches(message: messages.RefreshRegistryBranches) -> None:
     if message.meta and message.meta.initiator_id == WORKER_IDENTITY:
-        service.log.info("Ignoring refresh registry refresh request originating from self", worker=WORKER_IDENTITY)
+        get_logger().info("Ignoring refresh registry refresh request originating from self", worker=WORKER_IDENTITY)
         return
 
-    async with service.database.start_session(read_only=True) as db:
+    database = await get_database()
+    async with database.start_session(read_only=False) as db:
         await refresh_branches(db=db)
 
-    await service.component.refresh_schema_hash()
+    component = await get_component()
+    await component.refresh_schema_hash()
 
 
-async def rebased_branch(message: messages.RefreshRegistryRebasedBranch, service: InfrahubServices) -> None:
+async def rebased_branch(message: messages.RefreshRegistryRebasedBranch) -> None:
     if message.meta and message.meta.initiator_id == WORKER_IDENTITY:
-        service.log.info(
+        get_logger().info(
             "Ignoring refresh registry refreshed branch for request originating from self", worker=WORKER_IDENTITY
         )
         return
 
-    async with service.database.start_session(read_only=True) as db:
+    database = await get_database()
+
+    async with database.start_session(read_only=True) as db:
         await refresh_branches(db=db)
 
-    await service.component.refresh_schema_hash()
+    component = await get_component()
+    await component.refresh_schema_hash()
infrahub/message_bus/operations/send/echo.py CHANGED
@@ -1,13 +1,16 @@
 from prefect import flow
 
+from infrahub.log import get_logger
 from infrahub.message_bus import messages
 from infrahub.message_bus.messages.send_echo_request import SendEchoRequestResponse, SendEchoRequestResponseData
-from infrahub.services import InfrahubServices
+from infrahub.workers.dependencies import get_message_bus
 
 
 @flow(name="echo-request")
-async def request(message: messages.SendEchoRequest, service: InfrahubServices) -> None:
-    service.log.info(f"Received message: {message.message}")
+async def request(message: messages.SendEchoRequest) -> None:
+    get_logger().info(f"Received message: {message.message}")
+
     if message.reply_requested:
         response = SendEchoRequestResponse(data=SendEchoRequestResponseData(response=f"Reply to: {message.message}"))
-        await service.message_bus.reply_if_initiator_meta(message=response, initiator=message)
+        message_bus = await get_message_bus()
+        await message_bus.reply_if_initiator_meta(message=response, initiator=message)
infrahub/message_bus/types.py CHANGED
@@ -31,6 +31,7 @@ class KVTTL(int, Enum):
     ONE = 1
     TEN = 10
     FIFTEEN = 15
+    ONE_MINUTE = 60
     TWO_HOURS = 7200
 
     @classmethod
infrahub/permissions/__init__.py CHANGED
@@ -1,5 +1,5 @@
 from infrahub.permissions.backend import PermissionBackend
-from infrahub.permissions.globals import define_global_permission_from_branch
+from infrahub.permissions.globals import define_global_permission_from_branch, get_or_create_global_permission
 from infrahub.permissions.local_backend import LocalPermissionBackend
 from infrahub.permissions.manager import PermissionManager
 from infrahub.permissions.report import report_schema_permissions
@@ -12,5 +12,6 @@ __all__ = [
     "PermissionManager",
     "define_global_permission_from_branch",
     "get_global_permission_for_kind",
+    "get_or_create_global_permission",
     "report_schema_permissions",
 ]
infrahub/permissions/constants.py CHANGED
@@ -25,8 +25,21 @@ GLOBAL_PERMISSION_DENIAL_MESSAGE = {
     GlobalPermissions.EDIT_DEFAULT_BRANCH.value: "You are not allowed to change data in the default branch",
     GlobalPermissions.MERGE_BRANCH.value: "You are not allowed to merge a branch",
     GlobalPermissions.MERGE_PROPOSED_CHANGE.value: "You are not allowed to merge proposed changes",
+    GlobalPermissions.REVIEW_PROPOSED_CHANGE.value: "You are not allowed to review proposed changes",
     GlobalPermissions.MANAGE_SCHEMA.value: "You are not allowed to manage the schema",
     GlobalPermissions.MANAGE_ACCOUNTS.value: "You are not allowed to manage user accounts, groups or roles",
     GlobalPermissions.MANAGE_PERMISSIONS.value: "You are not allowed to manage permissions",
     GlobalPermissions.MANAGE_REPOSITORIES.value: "You are not allowed to manage repositories",
 }
+
+GLOBAL_PERMISSION_DESCRIPTION = {
+    GlobalPermissions.EDIT_DEFAULT_BRANCH: "Allow a user to change data in the default branch",
+    GlobalPermissions.MERGE_BRANCH: "Allow a user to merge branches",
+    GlobalPermissions.MERGE_PROPOSED_CHANGE: "Allow a user to merge proposed changes",
+    GlobalPermissions.REVIEW_PROPOSED_CHANGE: "Allow a user to approve or reject proposed changes",
+    GlobalPermissions.MANAGE_SCHEMA: "Allow a user to manage the schema",
+    GlobalPermissions.MANAGE_ACCOUNTS: "Allow a user to manage accounts, account roles and account groups",
+    GlobalPermissions.MANAGE_PERMISSIONS: "Allow a user to manage permissions",
+    GlobalPermissions.MANAGE_REPOSITORIES: "Allow a user to manage repositories",
+    GlobalPermissions.SUPER_ADMIN: "Allow a user to do anything",
+}
infrahub/permissions/globals.py CHANGED
@@ -1,7 +1,19 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
 from infrahub.core.account import GlobalPermission
 from infrahub.core.constants import GLOBAL_BRANCH_NAME, GlobalPermissions, PermissionDecision
+from infrahub.core.manager import NodeManager
+from infrahub.core.node import Node
+from infrahub.core.protocols import CoreGlobalPermission
 from infrahub.core.registry import registry
 
+from .constants import GLOBAL_PERMISSION_DESCRIPTION
+
+if TYPE_CHECKING:
+    from infrahub.database import InfrahubDatabase
+
 
 def define_global_permission_from_branch(permission: GlobalPermissions, branch_name: str) -> GlobalPermission:
     if branch_name in (GLOBAL_BRANCH_NAME, registry.default_branch):
@@ -9,7 +21,24 @@ def define_global_permission_from_branch(permission: GlobalPermissions, branch_n
     else:
         decision = PermissionDecision.ALLOW_OTHER
 
-    return GlobalPermission(
+    return GlobalPermission(action=permission.value, decision=decision.value)
+
+
+async def get_or_create_global_permission(db: InfrahubDatabase, permission: GlobalPermissions) -> CoreGlobalPermission:
+    permissions = await NodeManager.query(
+        db=db, schema=CoreGlobalPermission, filters={"action__value": permission.value}, limit=1
+    )
+
+    if permissions:
+        return permissions[0]
+
+    p = await Node.init(db=db, schema=CoreGlobalPermission)
+    await p.new(
+        db=db,
         action=permission.value,
-        decision=decision.value,
+        decision=PermissionDecision.ALLOW_ALL.value,
+        description=GLOBAL_PERMISSION_DESCRIPTION[permission],
     )
+    await p.save(db=db)
+
+    return p
infrahub/permissions/manager.py CHANGED
@@ -48,6 +48,13 @@ class PermissionManager:
                 specificity += 1
         return specificity
 
+    def is_super_admin(self) -> bool:
+        return self.resolve_global_permission(
+            permission_to_check=GlobalPermission(
+                action=GlobalPermissions.SUPER_ADMIN, decision=PermissionDecision.ALLOW_ALL
+            ),
+        )
+
     def report_object_permission(self, namespace: str, name: str, action: str) -> PermissionDecisionFlag:
         """Given a set of permissions, return the permission decision for a given kind and action."""
         highest_specificity: int = -1
@@ -94,11 +101,7 @@ class PermissionManager:
 
     def has_permission(self, permission: GlobalPermission | ObjectPermission) -> bool:
         """Tell if a permission is granted given the permissions loaded in memory."""
-        is_super_admin = self.resolve_global_permission(
-            permission_to_check=GlobalPermission(
-                action=GlobalPermissions.SUPER_ADMIN, decision=PermissionDecision.ALLOW_ALL
-            ),
-        )
+        is_super_admin = self.is_super_admin()
 
         if isinstance(permission, GlobalPermission):
             return self.resolve_global_permission(permission_to_check=permission) or is_super_admin
infrahub/pools/prefix.py CHANGED
@@ -9,7 +9,9 @@ if TYPE_CHECKING:
     from infrahub.core.ipam.constants import IPNetworkType
 
 
-def get_next_available_prefix(pool: IPSet, prefix_length: int, prefix_ver: Literal[4, 6] = 4) -> IPNetworkType:
+def get_next_available_prefix(
+    pool: IPSet, prefix_length: int | None = None, prefix_ver: Literal[4, 6] = 4
+) -> IPNetworkType:
     """Get the next available prefix of a given prefix length from an IPSet.
 
     Args:
@@ -20,10 +22,7 @@ def get_next_available_prefix(pool: IPSet, prefix_length: int, prefix_ver: Liter
     Raises:
         ValueError: If there are no available subnets in the pool
     """
-    prefix_ver_map = {
-        4: ipaddress.IPv4Network,
-        6: ipaddress.IPv6Network,
-    }
+    prefix_ver_map = {4: ipaddress.IPv4Network, 6: ipaddress.IPv6Network}
 
     filtered_pool = IPSet([])
     for subnet in pool.iter_cidrs():
@@ -31,6 +30,9 @@ def get_next_available_prefix(pool: IPSet, prefix_length: int, prefix_ver: Liter
             filtered_pool.add(subnet)
 
     for cidr in filtered_pool.iter_cidrs():
+        if prefix_length is None:
+            return cidr
+
         if cidr.prefixlen <= prefix_length:
             next_available = ipaddress.ip_network(f"{cidr.network}/{prefix_length}")
             return next_available
infrahub/prefect_server/app.py CHANGED
@@ -1,16 +1,47 @@
 from __future__ import annotations
 
+import asyncio
+import os
+
 from fastapi import APIRouter, FastAPI
 from prefect.server.api.server import create_app
 
 from . import events
+from .bootstrap import init_prefect
+
+GLOBAL_TASKMGR_INIT_LOCK = "global.taskmgr.init"
 
 router = APIRouter(prefix="/infrahub")
 
 router.include_router(events.router)
 
 
+async def _init_prefect() -> None:
+    # Import there in case we are running Prefect within a testsuite using the original Prefect container
+    from infrahub import lock
+    from infrahub.lock import initialize_lock
+    from infrahub.services import InfrahubServices
+    from infrahub.workers.dependencies import get_cache
+
+    cache = await get_cache()
+    service = await InfrahubServices.new(cache=cache)
+    initialize_lock(service=service)
+
+    async with lock.registry.get(name=GLOBAL_TASKMGR_INIT_LOCK):
+        await init_prefect()
+
+
 def create_infrahub_prefect() -> FastAPI:
+    if (
+        os.getenv("PREFECT_API_BLOCKS_REGISTER_ON_START") == "false"
+        and os.getenv("PREFECT_API_DATABASE_MIGRATE_ON_START") == "false"
+    ):
+        # We are probably running distributed mode
+        from infrahub import config
+
+        config.SETTINGS.initialize_and_exit()
+        asyncio.run(_init_prefect())
+
     app = create_app()
     api_app: FastAPI = app.__dict__["api_app"]
     api_app.include_router(router=router)
infrahub/prefect_server/bootstrap.py ADDED
@@ -0,0 +1,18 @@
+import asyncio
+
+from prefect.server.database import provide_database_interface
+from prefect.server.models.block_registration import run_block_auto_registration
+
+
+async def init_prefect() -> None:
+    db = provide_database_interface()
+
+    await db.create_db()
+    session = await db.session()
+
+    async with session:
+        await run_block_auto_registration(session=session)
+
+
+if __name__ == "__main__":
+    asyncio.run(init_prefect())