infrahub-server 1.5.5__py3-none-any.whl → 1.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (130)
  1. infrahub/api/artifact.py +5 -3
  2. infrahub/auth.py +5 -6
  3. infrahub/cli/db.py +3 -3
  4. infrahub/cli/db_commands/clean_duplicate_schema_fields.py +2 -2
  5. infrahub/cli/dev.py +30 -0
  6. infrahub/config.py +62 -14
  7. infrahub/constants/database.py +5 -5
  8. infrahub/core/branch/models.py +24 -6
  9. infrahub/core/constants/__init__.py +1 -0
  10. infrahub/core/diff/model/diff.py +2 -2
  11. infrahub/core/graph/constraints.py +2 -2
  12. infrahub/core/manager.py +191 -60
  13. infrahub/core/merge.py +29 -2
  14. infrahub/core/migrations/graph/m041_deleted_dup_edges.py +2 -3
  15. infrahub/core/migrations/shared.py +2 -2
  16. infrahub/core/models.py +5 -6
  17. infrahub/core/node/__init__.py +12 -6
  18. infrahub/core/node/create.py +36 -8
  19. infrahub/core/node/ipam.py +4 -4
  20. infrahub/core/node/node_property_attribute.py +2 -2
  21. infrahub/core/node/standard.py +1 -1
  22. infrahub/core/query/attribute.py +1 -1
  23. infrahub/core/query/branch.py +11 -0
  24. infrahub/core/query/node.py +9 -5
  25. infrahub/core/query/standard_node.py +3 -0
  26. infrahub/core/relationship/model.py +15 -10
  27. infrahub/core/schema/__init__.py +3 -3
  28. infrahub/core/schema/generic_schema.py +1 -1
  29. infrahub/core/schema/schema_branch.py +35 -16
  30. infrahub/core/task/user_task.py +2 -2
  31. infrahub/core/validators/determiner.py +3 -6
  32. infrahub/core/validators/enum.py +2 -2
  33. infrahub/database/__init__.py +1 -1
  34. infrahub/dependencies/interface.py +2 -2
  35. infrahub/events/constants.py +2 -2
  36. infrahub/git/base.py +42 -1
  37. infrahub/git/models.py +2 -1
  38. infrahub/git/repository.py +5 -1
  39. infrahub/git/tasks.py +28 -1
  40. infrahub/git/utils.py +9 -0
  41. infrahub/graphql/analyzer.py +4 -4
  42. infrahub/graphql/loaders/peers.py +6 -0
  43. infrahub/graphql/mutations/computed_attribute.py +1 -1
  44. infrahub/graphql/mutations/convert_object_type.py +1 -1
  45. infrahub/graphql/mutations/display_label.py +1 -1
  46. infrahub/graphql/mutations/hfid.py +1 -1
  47. infrahub/graphql/mutations/ipam.py +1 -1
  48. infrahub/graphql/mutations/profile.py +9 -1
  49. infrahub/graphql/mutations/relationship.py +2 -2
  50. infrahub/graphql/mutations/resource_manager.py +1 -1
  51. infrahub/graphql/queries/__init__.py +2 -1
  52. infrahub/graphql/queries/branch.py +58 -3
  53. infrahub/graphql/queries/ipam.py +9 -4
  54. infrahub/graphql/queries/resource_manager.py +7 -11
  55. infrahub/graphql/queries/search.py +5 -6
  56. infrahub/graphql/resolvers/ipam.py +20 -0
  57. infrahub/graphql/resolvers/many_relationship.py +12 -11
  58. infrahub/graphql/resolvers/resolver.py +6 -2
  59. infrahub/graphql/resolvers/single_relationship.py +1 -11
  60. infrahub/graphql/schema.py +2 -0
  61. infrahub/graphql/types/__init__.py +3 -1
  62. infrahub/graphql/types/branch.py +98 -2
  63. infrahub/lock.py +6 -6
  64. infrahub/log.py +1 -1
  65. infrahub/message_bus/messages/__init__.py +0 -12
  66. infrahub/patch/constants.py +2 -2
  67. infrahub/profiles/node_applier.py +9 -0
  68. infrahub/proposed_change/tasks.py +1 -1
  69. infrahub/task_manager/task.py +4 -4
  70. infrahub/telemetry/constants.py +2 -2
  71. infrahub/trigger/models.py +2 -2
  72. infrahub/trigger/setup.py +6 -9
  73. infrahub/utils.py +19 -1
  74. infrahub/validators/tasks.py +1 -1
  75. infrahub/workers/infrahub_async.py +39 -1
  76. infrahub_sdk/async_typer.py +2 -1
  77. infrahub_sdk/batch.py +2 -2
  78. infrahub_sdk/client.py +121 -10
  79. infrahub_sdk/config.py +2 -2
  80. infrahub_sdk/ctl/branch.py +176 -2
  81. infrahub_sdk/ctl/check.py +3 -3
  82. infrahub_sdk/ctl/cli.py +2 -2
  83. infrahub_sdk/ctl/cli_commands.py +10 -9
  84. infrahub_sdk/ctl/generator.py +2 -2
  85. infrahub_sdk/ctl/graphql.py +3 -4
  86. infrahub_sdk/ctl/importer.py +2 -3
  87. infrahub_sdk/ctl/repository.py +5 -6
  88. infrahub_sdk/ctl/task.py +2 -4
  89. infrahub_sdk/ctl/utils.py +4 -4
  90. infrahub_sdk/ctl/validate.py +1 -2
  91. infrahub_sdk/diff.py +80 -3
  92. infrahub_sdk/graphql/constants.py +14 -1
  93. infrahub_sdk/graphql/renderers.py +5 -1
  94. infrahub_sdk/node/attribute.py +10 -10
  95. infrahub_sdk/node/constants.py +2 -3
  96. infrahub_sdk/node/node.py +54 -11
  97. infrahub_sdk/node/related_node.py +1 -2
  98. infrahub_sdk/node/relationship.py +1 -2
  99. infrahub_sdk/object_store.py +4 -4
  100. infrahub_sdk/operation.py +2 -2
  101. infrahub_sdk/protocols_base.py +0 -1
  102. infrahub_sdk/protocols_generator/generator.py +1 -1
  103. infrahub_sdk/pytest_plugin/items/jinja2_transform.py +1 -1
  104. infrahub_sdk/pytest_plugin/models.py +1 -1
  105. infrahub_sdk/pytest_plugin/plugin.py +1 -1
  106. infrahub_sdk/query_groups.py +2 -2
  107. infrahub_sdk/schema/__init__.py +10 -14
  108. infrahub_sdk/schema/main.py +2 -2
  109. infrahub_sdk/schema/repository.py +2 -2
  110. infrahub_sdk/spec/object.py +2 -2
  111. infrahub_sdk/spec/range_expansion.py +1 -1
  112. infrahub_sdk/template/__init__.py +2 -1
  113. infrahub_sdk/transfer/importer/json.py +3 -3
  114. infrahub_sdk/types.py +2 -2
  115. infrahub_sdk/utils.py +2 -2
  116. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0.dist-info}/METADATA +58 -59
  117. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0.dist-info}/RECORD +240 -246
  118. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0.dist-info}/WHEEL +1 -1
  119. infrahub_server-1.6.0.dist-info/entry_points.txt +12 -0
  120. infrahub_testcontainers/container.py +2 -2
  121. infrahub_testcontainers/docker-compose-cluster.test.yml +1 -1
  122. infrahub_testcontainers/docker-compose.test.yml +1 -1
  123. infrahub/core/schema/generated/__init__.py +0 -0
  124. infrahub/core/schema/generated/attribute_schema.py +0 -133
  125. infrahub/core/schema/generated/base_node_schema.py +0 -111
  126. infrahub/core/schema/generated/genericnode_schema.py +0 -30
  127. infrahub/core/schema/generated/node_schema.py +0 -40
  128. infrahub/core/schema/generated/relationship_schema.py +0 -141
  129. infrahub_server-1.5.5.dist-info/entry_points.txt +0 -13
  130. {infrahub_server-1.5.5.dist-info → infrahub_server-1.6.0.dist-info/licenses}/LICENSE.txt +0 -0
infrahub/graphql/types/branch.py CHANGED
@@ -2,11 +2,12 @@ from __future__ import annotations
 
  from typing import TYPE_CHECKING, Any
 
- from graphene import Boolean, Field, Int, String
+ from graphene import Boolean, Field, Int, List, NonNull, String
 
  from infrahub.core.branch import Branch
  from infrahub.core.constants import GLOBAL_BRANCH_NAME
 
+ from ...exceptions import BranchNotFoundError
  from .enums import InfrahubBranchStatus
  from .standard_node import InfrahubObjectType
 
@@ -33,6 +34,10 @@ class BranchType(InfrahubObjectType):
  name = "Branch"
  model = Branch
 
+ @staticmethod
+ async def _map_fields_to_graphql(objs: list[Branch], fields: dict) -> list[dict[str, Any]]:
+ return [await obj.to_graphql(fields=fields) for obj in objs]
+
  @classmethod
  async def get_list(
  cls,
@@ -46,4 +51,95 @@ class BranchType(InfrahubObjectType):
  if not objs:
  return []
 
- return [await obj.to_graphql(fields=fields) for obj in objs if obj.name != GLOBAL_BRANCH_NAME]
+ return await cls._map_fields_to_graphql(objs=objs, fields=fields)
+
+ @classmethod
+ async def get_by_name(
+ cls,
+ fields: dict,
+ graphql_context: GraphqlContext,
+ name: str,
+ ) -> dict[str, Any]:
+ branch_responses = await cls.get_list(fields=fields, graphql_context=graphql_context, name=name)
+
+ if branch_responses:
+ return branch_responses[0]
+ raise BranchNotFoundError(f"Branch with name '{name}' not found")
+
+
+ class RequiredStringValueField(InfrahubObjectType):
+ value = String(required=True)
+
+
+ class NonRequiredStringValueField(InfrahubObjectType):
+ value = String(required=False)
+
+
+ class NonRequiredIntValueField(InfrahubObjectType):
+ value = Int(required=False)
+
+
+ class NonRequiredBooleanValueField(InfrahubObjectType):
+ value = Boolean(required=False)
+
+
+ class StatusField(InfrahubObjectType):
+ value = InfrahubBranchStatus(required=True)
+
+
+ class InfrahubBranch(BranchType):
+ name = Field(RequiredStringValueField, required=True)
+ description = Field(NonRequiredStringValueField, required=False)
+ origin_branch = Field(NonRequiredStringValueField, required=False)
+ branched_from = Field(NonRequiredStringValueField, required=False)
+ graph_version = Field(NonRequiredIntValueField, required=False)
+ status = Field(StatusField, required=True)
+ sync_with_git = Field(NonRequiredBooleanValueField, required=False)
+ is_default = Field(NonRequiredBooleanValueField, required=False)
+ is_isolated = Field(
+ NonRequiredBooleanValueField, required=False, deprecation_reason="non isolated mode is not supported anymore"
+ )
+ has_schema_changes = Field(NonRequiredBooleanValueField, required=False)
+
+ class Meta:
+ description = "InfrahubBranch"
+ name = "InfrahubBranch"
+
+ @staticmethod
+ async def _map_fields_to_graphql(objs: list[Branch], fields: dict) -> list[dict[str, Any]]:
+ field_keys = fields.keys()
+ result: list[dict[str, Any]] = []
+ for obj in objs:
+ if obj.name == GLOBAL_BRANCH_NAME:
+ continue
+ data: dict[str, Any] = {}
+ for field in field_keys:
+ if field == "id":
+ data["id"] = obj.uuid
+ continue
+ value = getattr(obj, field, None)
+ if isinstance(fields.get(field), dict):
+ data[field] = {"value": value}
+ else:
+ data[field] = value
+ result.append(data)
+ return result
+
+
+ class InfrahubBranchEdge(InfrahubObjectType):
+ node = Field(InfrahubBranch, required=True)
+
+
+ class InfrahubBranchType(InfrahubObjectType):
+ count = Field(Int, description="Total number of items")
+ edges = Field(NonNull(List(of_type=NonNull(InfrahubBranchEdge))))
+ default_branch = Field(
+ InfrahubBranch,
+ required=True,
+ description="The default branch of the Infrahub instance, provides a direct way to access the default branch regardless of filters.",
+ )
+
+ @classmethod
+ async def get_list_count(cls, graphql_context: GraphqlContext, **kwargs: Any) -> int:
+ async with graphql_context.db.start_session(read_only=True) as db:
+ return await Branch.get_list_count(db=db, **kwargs)
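The added `InfrahubBranch`, `InfrahubBranchEdge`, and `InfrahubBranchType` objects expose branches as a `count`/`edges` collection with a `default_branch` shortcut. Below is a minimal sketch of consuming it through the Python SDK; the top-level query name `InfrahubBranch` and the selected fields are assumptions inferred from the types above, not a confirmed API.

```python
# Illustrative sketch only: the "InfrahubBranch" query name is an assumption
# inferred from the GraphQL types added in the diff above.
import asyncio

from infrahub_sdk import InfrahubClient

BRANCH_QUERY = """
query {
  InfrahubBranch {
    count
    default_branch { name { value } }
    edges {
      node {
        name { value }
        status { value }
        sync_with_git { value }
      }
    }
  }
}
"""


async def main() -> None:
    client = InfrahubClient(address="http://localhost:8000")
    result = await client.execute_graphql(query=BRANCH_QUERY)
    print(result["InfrahubBranch"]["count"])


if __name__ == "__main__":
    asyncio.run(main())
```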
infrahub/lock.py CHANGED
@@ -58,7 +58,7 @@ class InfrahubMultiLock:
  self.locks = locks or []
  self.metrics = metrics
 
- async def __aenter__(self):
+ async def __aenter__(self) -> None:
  await self.acquire()
 
  async def __aexit__(
@@ -66,7 +66,7 @@
  exc_type: type[BaseException] | None,
  exc_value: BaseException | None,
  traceback: TracebackType | None,
- ):
+ ) -> None:
  await self.release()
 
  async def acquire(self) -> None:
@@ -86,7 +86,7 @@ class NATSLock:
  self.token = None
  self.service = service
 
- async def __aenter__(self):
+ async def __aenter__(self) -> None:
  await self.acquire()
 
  async def __aexit__(
@@ -94,7 +94,7 @@
  exc_type: type[BaseException] | None,
  exc_value: BaseException | None,
  traceback: TracebackType | None,
- ):
+ ) -> None:
  await self.release()
 
  async def acquire(self) -> None:
@@ -162,7 +162,7 @@ class InfrahubLock:
  def acquire_time(self, value: int) -> None:
  self._acquire_time = value
 
- async def __aenter__(self):
+ async def __aenter__(self) -> None:
  await self.acquire()
 
  async def __aexit__(
@@ -170,7 +170,7 @@
  exc_type: type[BaseException] | None,
  exc_value: BaseException | None,
  traceback: TracebackType | None,
- ):
+ ) -> None:
  await self.release()
 
  async def acquire(self) -> None:
infrahub/log.py CHANGED
@@ -10,7 +10,7 @@ from structlog.dev import plain_traceback
  if TYPE_CHECKING:
  from structlog.types import Processor
 
- INFRAHUB_PRODUCTION = TypeAdapter(bool).validate_python(os.environ.get("INFRAHUB_PRODUCTION", True))
+ INFRAHUB_PRODUCTION = TypeAdapter(bool).validate_python(os.environ.get("INFRAHUB_PRODUCTION", "true"))
  INFRAHUB_LOG_LEVEL = os.environ.get("INFRAHUB_LOG_LEVEL", "INFO")
 
 
infrahub/message_bus/messages/__init__.py CHANGED
@@ -22,20 +22,8 @@ RESPONSE_MAP: dict[str, type[InfrahubResponse]] = {
  }
 
  PRIORITY_MAP = {
- "check.artifact.create": 2,
- "check.repository.check_definition": 2,
- "check.repository.merge_conflicts": 2,
  "send.echo.request": 5, # Currently only for testing purposes, will be removed once all message bus have been migrated to prefect
- "event.branch.delete": 5,
- "event.branch.merge": 5,
- "event.schema.update": 5,
- "git.diff.names_only": 4,
  "git.file.get": 4,
- "request.artifact.generate": 2,
- "request.git.sync": 4,
- "request.proposed_change.pipeline": 5,
- "transform.jinja.template": 4,
- "transform.python.data": 4,
  }
 
 
infrahub/patch/constants.py CHANGED
@@ -1,7 +1,7 @@
- from enum import Enum
+ from enum import StrEnum
 
 
- class PatchPlanFilename(str, Enum):
+ class PatchPlanFilename(StrEnum):
  VERTICES_TO_ADD = "vertices_to_add.json"
  VERTICES_TO_UPDATE = "vertices_to_update.json"
  VERTICES_TO_DELETE = "vertices_to_delete.json"
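This release moves several `class X(str, Enum)` definitions (here and in the telemetry and trigger modules further down) to `enum.StrEnum`, available since Python 3.11. A small sketch of why the two spellings stay interchangeable for string comparison and serialization:

```python
from enum import StrEnum


class PatchPlanFilename(StrEnum):
    VERTICES_TO_ADD = "vertices_to_add.json"


# StrEnum members are genuine str instances, so comparisons and string
# formatting that relied on the old `str, Enum` mixin keep working.
assert isinstance(PatchPlanFilename.VERTICES_TO_ADD, str)
assert PatchPlanFilename.VERTICES_TO_ADD == "vertices_to_add.json"
assert str(PatchPlanFilename.VERTICES_TO_ADD) == "vertices_to_add.json"
```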
infrahub/profiles/node_applier.py CHANGED
@@ -9,6 +9,15 @@ from .queries.get_profile_data import GetProfileDataQuery, ProfileData
 
 
  class NodeProfilesApplier:
+ """Applies profile values to nodes and templates.
+
+ Profile values take precedence over both default values and template-sourced values.
+ When a template has profiles assigned:
+ 1. Profile values are applied to the template itself
+ 2. Nodes created from that template inherit the profile values (not the template's own values)
+ 3. Profile priority determines which profile wins when multiple profiles set the same attribute
+ """
+
  def __init__(self, db: InfrahubDatabase, branch: Branch):
  self.db = db
  self.branch = branch
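The precedence described in the new docstring can be illustrated with a standalone sketch. This is not Infrahub's implementation; it assumes the usual `profile_priority` convention where a lower value wins.

```python
# Standalone illustration of the documented ordering:
# profile value > template value > schema default.
def resolve_attribute(
    default: str,
    template_value: str | None = None,
    profiles: list[tuple[int, str]] | None = None,  # (profile_priority, value)
) -> str:
    if profiles:
        # Assumption: the profile with the lowest priority value wins.
        return min(profiles, key=lambda item: item[0])[1]
    if template_value is not None:
        return template_value
    return default


assert resolve_attribute("default") == "default"
assert resolve_attribute("default", template_value="from-template") == "from-template"
assert resolve_attribute(
    "default", template_value="from-template", profiles=[(2000, "profile-b"), (1000, "profile-a")]
) == "profile-a"
```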
infrahub/proposed_change/tasks.py CHANGED
@@ -480,7 +480,7 @@ async def _get_proposed_change_schema_integrity_constraints(
  DiffElementType.RELATIONSHIP_ONE.value.lower(),
  ):
  field_summary.relationship_names.add(element_name)
- elif element_type.lower() in (DiffElementType.ATTRIBUTE.value.lower(),):
+ elif element_type.lower() == DiffElementType.ATTRIBUTE.value.lower():
  field_summary.attribute_names.add(element_name)
 
  determiner = ConstraintValidatorDeterminer(schema_branch=schema)
infrahub/task_manager/task.py CHANGED
@@ -1,7 +1,7 @@
  import asyncio
  import hashlib
  import json
- from datetime import datetime, timedelta, timezone
+ from datetime import UTC, datetime, timedelta
  from typing import Any
  from uuid import UUID
 
@@ -151,7 +151,7 @@ class PrefectTask:
  remaining -= nb_fetched
 
  for flow_log in all_logs:
- if flow_log.flow_run_id and flow_log.message not in ["Finished in state Completed()"]:
+ if flow_log.flow_run_id and flow_log.message != "Finished in state Completed()":
  logs_flow.logs[flow_log.flow_run_id].append(flow_log)
 
  return logs_flow
@@ -325,7 +325,7 @@ class PrefectTask:
  "parameters": flow.parameters,
  "branch": await cls._extract_branch_name(flow=flow),
  "tags": flow.tags,
- "workflow": workflow_names.get(flow.flow_id, None),
+ "workflow": workflow_names.get(flow.flow_id),
  "related_node": related_node.id if related_node else None,
  "related_node_kind": related_node.kind if related_node else None,
  "related_nodes": related_nodes_info.get_related_nodes_as_dict(flow_id=flow.id),
@@ -353,7 +353,7 @@ class PrefectTask:
  logger = get_logger()
 
  async with get_client(sync_client=False) as client:
- cutoff = datetime.now(timezone.utc) - timedelta(days=days_to_keep)
+ cutoff = datetime.now(UTC) - timedelta(days=days_to_keep)
 
  flow_run_filter = FlowRunFilter(
  start_time=FlowRunFilterStartTime(before_=cutoff), # type: ignore[arg-type]
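The `timezone.utc` to `UTC` switch above is purely cosmetic: `datetime.UTC`, added in Python 3.11, is an alias for `datetime.timezone.utc`.

```python
from datetime import UTC, datetime, timedelta, timezone

# UTC (Python 3.11+) is the very same object as timezone.utc, so the cutoff
# computed in the diff above behaves identically.
assert UTC is timezone.utc
cutoff = datetime.now(UTC) - timedelta(days=30)
```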
infrahub/telemetry/constants.py CHANGED
@@ -1,9 +1,9 @@
- from enum import Enum
+ from enum import StrEnum
 
  TELEMETRY_KIND: str = "community"
  TELEMETRY_VERSION: str = "20250318"
 
 
- class InfrahubType(str, Enum):
+ class InfrahubType(StrEnum):
  COMMUNITY = "community"
  ENTERPRISE = "enterprise"
infrahub/trigger/models.py CHANGED
@@ -1,7 +1,7 @@
  from __future__ import annotations
 
  from datetime import timedelta
- from enum import Enum, StrEnum
+ from enum import StrEnum
  from typing import TYPE_CHECKING, Any, TypeVar
 
  from prefect.events.actions import RunDeployment
@@ -92,7 +92,7 @@ class TriggerSetupReport(BaseModel):
  return created + updated
 
 
- class TriggerType(str, Enum):
+ class TriggerType(StrEnum):
  ACTION_TRIGGER_RULE = "action_trigger_rule"
  BUILTIN = "builtin"
  WEBHOOK = "webhook"
infrahub/trigger/setup.py CHANGED
@@ -122,7 +122,7 @@ async def setup_triggers(
  actions=[action.get_prefect(mapping=deployments_mapping) for action in trigger.actions],
  )
 
- existing_automation = existing_automations.get(trigger.generate_name(), None)
+ existing_automation = existing_automations.get(trigger.generate_name())
 
  if existing_automation:
  trigger_comparison = compare_automations(
@@ -171,19 +171,16 @@ async def gather_all_automations(client: PrefectClient) -> list[Automation]:
  retrieves them all by paginating through the results. The default within Prefect is 200 items,
  and client.read_automations() doesn't support pagination parameters.
  """
- automation_count_response = await client.request("POST", "/automations/count")
- automation_count_response.raise_for_status()
- automation_count: int = automation_count_response.json()
  offset = 0
  limit = 200
- missing_automations = True
  automations: list[Automation] = []
- while missing_automations:
+ while True:
  response = await client.request("POST", "/automations/filter", json={"limit": limit, "offset": offset})
  response.raise_for_status()
- automations.extend(Automation.model_validate_list(response.json()))
- if len(automations) >= automation_count:
- missing_automations = False
+ batch = Automation.model_validate_list(response.json())
+ automations.extend(batch)
+ if len(batch) < limit:
+ break
  offset += limit
 
  return automations
infrahub/utils.py CHANGED
@@ -76,10 +76,28 @@ def get_nested_dict(nested_dict: dict[str, Any], keys: list[str]) -> dict[str, A
  return current_level if isinstance(current_level, dict) else {}
 
 
- def get_all_subclasses(cls: AnyClass) -> list[AnyClass]:
+ def get_all_subclasses[AnyClass: type](cls: AnyClass) -> list[AnyClass]:
  """Recursively get all subclasses of the given class."""
  subclasses: list[AnyClass] = []
  for subclass in cls.__subclasses__():
  subclasses.append(subclass)
  subclasses.extend(get_all_subclasses(subclass))
  return subclasses
+
+
+ def has_any_key(data: dict[str, Any], keys: list[str]) -> bool:
+ """Recursively check if any of the specified keys exist in the dictionary at any level.
+
+ Args:
+ data: The dictionary to search through
+ keys: List of key names to search for
+
+ Returns:
+ True if any of the keys are found at any level of the dictionary, False otherwise
+ """
+ for key, value in data.items():
+ if key in keys:
+ return True
+ if isinstance(value, dict) and has_any_key(data=value, keys=keys):
+ return True
+ return False
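A short usage sketch of the new `has_any_key` helper, based on the definition above (the nested payload is made up for illustration). Note that `get_all_subclasses` now uses PEP 695 generic syntax (`def f[T](...)`), which requires Python 3.12 or newer.

```python
# Hypothetical nested payload, only used to exercise has_any_key.
payload = {"node": {"attributes": {"name": {"value": "eth0"}}}}

assert has_any_key(data=payload, keys=["value"]) is True        # found deep in the nested structure
assert has_any_key(data=payload, keys=["id", "hfid"]) is False  # neither key present at any level
```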
infrahub/validators/tasks.py CHANGED
@@ -12,7 +12,7 @@ from .events import send_start_validator
  ValidatorType = TypeVar("ValidatorType", bound=CoreValidator)
 
 
- async def start_validator(
+ async def start_validator[ValidatorType: CoreValidator](
  client: InfrahubClient,
  validator: CoreValidator | None,
  validator_type: type[ValidatorType],
infrahub/workers/infrahub_async.py CHANGED
@@ -1,5 +1,8 @@
+ import asyncio
+ import contextlib
  import logging
  import os
+ from pathlib import Path
  from typing import Any
 
  import typer
@@ -105,7 +108,7 @@ class InfrahubWorkerAsync(BaseWorker):
 
  # Start metric endpoint
  if metric_port is None or metric_port != 0:
- metric_port = metric_port or int(os.environ.get("INFRAHUB_METRICS_PORT", 8000))
+ metric_port = metric_port or int(os.environ.get("INFRAHUB_METRICS_PORT", "8000"))
  self._logger.info(f"Starting metric endpoint on port {metric_port}")
  start_http_server(metric_port)
 
@@ -122,6 +125,7 @@ class InfrahubWorkerAsync(BaseWorker):
  )
 
  set_component_type(component_type=self.component_type)
+ await self.set_git_global_config()
  await self._init_services(client=client)
 
  if not registry.schema_has_been_initialized():
@@ -204,3 +208,37 @@ class InfrahubWorkerAsync(BaseWorker):
  )
 
  self.service = service
+
+ async def set_git_global_config(self) -> None:
+ global_config_file = config.SETTINGS.git.global_config_file
+ if not os.getenv("GIT_CONFIG_GLOBAL") and global_config_file:
+ config_dir = Path(global_config_file).parent
+ with contextlib.suppress(FileExistsError):
+ config_dir.mkdir(exist_ok=True, parents=True)
+ os.environ["GIT_CONFIG_GLOBAL"] = global_config_file
+ self._logger.info(f"Set git config file to {global_config_file}")
+
+ await self._run_git_config_global(config.SETTINGS.git.user_name, setting_name="user.name")
+ await self._run_git_config_global(config.SETTINGS.git.user_email, setting_name="user.email")
+ await self._run_git_config_global("*", "--replace-all", setting_name="safe.directory")
+ await self._run_git_config_global("true", setting_name="credential.usehttppath")
+ await self._run_git_config_global(
+ f"/usr/bin/env {config.SETTINGS.dev.git_credential_helper}", setting_name="credential.helper"
+ )
+
+ async def _run_git_config_global(self, *args: str, setting_name: str) -> None:
+ proc = await asyncio.create_subprocess_exec(
+ "git",
+ "config",
+ "--global",
+ setting_name,
+ *args,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE,
+ )
+ _, stderr = await proc.communicate()
+ if proc.returncode != 0:
+ error_msg = stderr.decode("utf-8", errors="ignore").strip() or "unknown error"
+ self._logger.error(f"Failed to set git {setting_name}: %s", error_msg)
+ else:
+ self._logger.info(f"Git {setting_name} set")
infrahub_sdk/async_typer.py CHANGED
@@ -2,8 +2,9 @@ from __future__ import annotations
 
  import asyncio
  import inspect
+ from collections.abc import Callable
  from functools import partial, wraps
- from typing import Any, Callable
+ from typing import Any
 
  from typer import Typer
 
infrahub_sdk/batch.py CHANGED
@@ -1,10 +1,10 @@
  from __future__ import annotations
 
  import asyncio
- from collections.abc import AsyncGenerator, Awaitable, Generator
+ from collections.abc import AsyncGenerator, Awaitable, Callable, Generator
  from concurrent.futures import ThreadPoolExecutor
  from dataclasses import dataclass
- from typing import TYPE_CHECKING, Any, Callable
+ from typing import TYPE_CHECKING, Any
 
  if TYPE_CHECKING:
  from .node import InfrahubNode, InfrahubNodeSync
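Both SDK modules above (async_typer.py and batch.py) move `Callable` from `typing` to `collections.abc`, the form preferred since Python 3.9; annotations are unaffected by the import move. A minimal sketch:

```python
from collections.abc import Callable

# collections.abc.Callable is subscriptable just like typing.Callable on
# Python 3.9+, so type aliases and signatures keep the same shape.
Handler = Callable[[str], None]


def run(handler: Handler, name: str) -> None:
    handler(name)


run(lambda name: print(f"running {name}"), "example")
```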