infrahub-server 1.2.11__py3-none-any.whl → 1.3.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- infrahub/actions/constants.py +86 -0
- infrahub/actions/gather.py +114 -0
- infrahub/actions/models.py +241 -0
- infrahub/actions/parsers.py +104 -0
- infrahub/actions/schema.py +382 -0
- infrahub/actions/tasks.py +126 -0
- infrahub/actions/triggers.py +21 -0
- infrahub/cli/db.py +1 -2
- infrahub/core/account.py +24 -47
- infrahub/core/attribute.py +13 -15
- infrahub/core/constants/__init__.py +5 -0
- infrahub/core/constants/infrahubkind.py +9 -0
- infrahub/core/convert_object_type/__init__.py +0 -0
- infrahub/core/convert_object_type/conversion.py +122 -0
- infrahub/core/convert_object_type/schema_mapping.py +56 -0
- infrahub/core/diff/query/all_conflicts.py +1 -5
- infrahub/core/diff/query/artifact.py +10 -20
- infrahub/core/diff/query/diff_get.py +3 -6
- infrahub/core/diff/query/field_summary.py +2 -4
- infrahub/core/diff/query/merge.py +70 -123
- infrahub/core/diff/query/save.py +20 -32
- infrahub/core/diff/query/summary_counts_enricher.py +34 -54
- infrahub/core/manager.py +14 -11
- infrahub/core/migrations/graph/m003_relationship_parent_optional.py +1 -2
- infrahub/core/migrations/graph/m013_convert_git_password_credential.py +2 -4
- infrahub/core/migrations/graph/m019_restore_rels_to_time.py +11 -22
- infrahub/core/migrations/graph/m020_duplicate_edges.py +3 -6
- infrahub/core/migrations/graph/m021_missing_hierarchy_merge.py +1 -2
- infrahub/core/migrations/graph/m024_missing_hierarchy_backfill.py +1 -2
- infrahub/core/migrations/query/attribute_add.py +1 -2
- infrahub/core/migrations/query/attribute_rename.py +5 -10
- infrahub/core/migrations/query/delete_element_in_schema.py +19 -17
- infrahub/core/migrations/query/node_duplicate.py +19 -21
- infrahub/core/migrations/query/relationship_duplicate.py +19 -17
- infrahub/core/migrations/schema/node_attribute_remove.py +4 -8
- infrahub/core/migrations/schema/node_remove.py +19 -19
- infrahub/core/models.py +29 -2
- infrahub/core/node/__init__.py +90 -18
- infrahub/core/node/create.py +211 -0
- infrahub/core/node/resource_manager/number_pool.py +31 -5
- infrahub/core/node/standard.py +6 -1
- infrahub/core/protocols.py +56 -0
- infrahub/core/protocols_base.py +3 -0
- infrahub/core/query/__init__.py +2 -2
- infrahub/core/query/diff.py +19 -32
- infrahub/core/query/ipam.py +10 -20
- infrahub/core/query/node.py +28 -46
- infrahub/core/query/relationship.py +53 -32
- infrahub/core/query/resource_manager.py +1 -2
- infrahub/core/query/subquery.py +2 -4
- infrahub/core/relationship/model.py +3 -0
- infrahub/core/schema/__init__.py +2 -1
- infrahub/core/schema/attribute_parameters.py +160 -0
- infrahub/core/schema/attribute_schema.py +111 -8
- infrahub/core/schema/basenode_schema.py +25 -1
- infrahub/core/schema/definitions/core/__init__.py +29 -1
- infrahub/core/schema/definitions/core/group.py +45 -0
- infrahub/core/schema/definitions/internal.py +27 -4
- infrahub/core/schema/generated/attribute_schema.py +16 -3
- infrahub/core/schema/manager.py +3 -0
- infrahub/core/schema/schema_branch.py +67 -7
- infrahub/core/validators/__init__.py +13 -1
- infrahub/core/validators/attribute/choices.py +1 -3
- infrahub/core/validators/attribute/enum.py +1 -3
- infrahub/core/validators/attribute/kind.py +1 -3
- infrahub/core/validators/attribute/length.py +13 -7
- infrahub/core/validators/attribute/min_max.py +118 -0
- infrahub/core/validators/attribute/number_pool.py +106 -0
- infrahub/core/validators/attribute/optional.py +1 -4
- infrahub/core/validators/attribute/regex.py +5 -6
- infrahub/core/validators/attribute/unique.py +1 -3
- infrahub/core/validators/determiner.py +18 -2
- infrahub/core/validators/enum.py +12 -0
- infrahub/core/validators/node/hierarchy.py +3 -6
- infrahub/core/validators/query.py +1 -3
- infrahub/core/validators/relationship/count.py +6 -12
- infrahub/core/validators/relationship/optional.py +2 -4
- infrahub/core/validators/relationship/peer.py +3 -8
- infrahub/core/validators/uniqueness/query.py +5 -9
- infrahub/database/__init__.py +11 -2
- infrahub/events/group_action.py +1 -0
- infrahub/git/base.py +5 -3
- infrahub/git/integrator.py +102 -3
- infrahub/graphql/analyzer.py +139 -18
- infrahub/graphql/manager.py +4 -0
- infrahub/graphql/mutations/action.py +164 -0
- infrahub/graphql/mutations/convert_object_type.py +62 -0
- infrahub/graphql/mutations/main.py +24 -175
- infrahub/graphql/mutations/proposed_change.py +20 -17
- infrahub/graphql/mutations/resource_manager.py +62 -6
- infrahub/graphql/queries/convert_object_type_mapping.py +36 -0
- infrahub/graphql/queries/resource_manager.py +7 -1
- infrahub/graphql/schema.py +6 -0
- infrahub/menu/menu.py +31 -0
- infrahub/message_bus/messages/__init__.py +0 -10
- infrahub/message_bus/operations/__init__.py +0 -8
- infrahub/patch/queries/consolidate_duplicated_nodes.py +3 -6
- infrahub/patch/queries/delete_duplicated_edges.py +5 -10
- infrahub/pools/number.py +5 -3
- infrahub/prefect_server/models.py +1 -19
- infrahub/proposed_change/models.py +68 -3
- infrahub/proposed_change/tasks.py +907 -30
- infrahub/task_manager/models.py +10 -6
- infrahub/trigger/catalogue.py +2 -0
- infrahub/trigger/models.py +18 -2
- infrahub/trigger/tasks.py +3 -1
- infrahub/types.py +6 -0
- infrahub/workflows/catalogue.py +76 -0
- infrahub_sdk/client.py +43 -10
- infrahub_sdk/node/__init__.py +39 -0
- infrahub_sdk/node/attribute.py +122 -0
- infrahub_sdk/node/constants.py +21 -0
- infrahub_sdk/{node.py → node/node.py} +50 -749
- infrahub_sdk/node/parsers.py +15 -0
- infrahub_sdk/node/property.py +24 -0
- infrahub_sdk/node/related_node.py +266 -0
- infrahub_sdk/node/relationship.py +302 -0
- infrahub_sdk/protocols.py +112 -0
- infrahub_sdk/protocols_base.py +34 -2
- infrahub_sdk/query_groups.py +13 -2
- infrahub_sdk/schema/main.py +1 -0
- infrahub_sdk/schema/repository.py +16 -0
- infrahub_sdk/spec/object.py +1 -1
- infrahub_sdk/store.py +1 -1
- infrahub_sdk/testing/schemas/car_person.py +1 -0
- {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/METADATA +4 -4
- {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/RECORD +134 -122
- {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/WHEEL +1 -1
- infrahub_testcontainers/container.py +0 -1
- infrahub_testcontainers/docker-compose.test.yml +1 -1
- infrahub_testcontainers/helpers.py +8 -2
- infrahub/message_bus/messages/check_generator_run.py +0 -26
- infrahub/message_bus/messages/finalize_validator_execution.py +0 -15
- infrahub/message_bus/messages/proposed_change/base_with_diff.py +0 -16
- infrahub/message_bus/messages/proposed_change/request_proposedchange_refreshartifacts.py +0 -11
- infrahub/message_bus/messages/request_generatordefinition_check.py +0 -20
- infrahub/message_bus/messages/request_proposedchange_pipeline.py +0 -23
- infrahub/message_bus/operations/check/__init__.py +0 -3
- infrahub/message_bus/operations/check/generator.py +0 -156
- infrahub/message_bus/operations/finalize/__init__.py +0 -3
- infrahub/message_bus/operations/finalize/validator.py +0 -133
- infrahub/message_bus/operations/requests/__init__.py +0 -9
- infrahub/message_bus/operations/requests/generator_definition.py +0 -140
- infrahub/message_bus/operations/requests/proposed_change.py +0 -629
- /infrahub/{message_bus/messages/proposed_change → actions}/__init__.py +0 -0
- {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/LICENSE.txt +0 -0
- {infrahub_server-1.2.11.dist-info → infrahub_server-1.3.0b1.dist-info}/entry_points.txt +0 -0
infrahub/task_manager/models.py
CHANGED
@@ -84,11 +84,15 @@ class FlowProgress(BaseModel):
 
 
 class InfrahubEventFilter(EventFilter):
-
+    def add_related_filter(self, related: EventRelatedFilter) -> None:
+        if not isinstance(self.related, list):
+            self.related = []
+
+        self.related.append(related)
 
     def add_account_filter(self, account__ids: list[str] | None) -> None:
         if account__ids:
-            self.
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.account", "infrahub.resource.id": account__ids}
@@ -98,7 +102,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_branch_filter(self, branches: list[str] | None = None) -> None:
         if branches:
-            self.
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.branch", "infrahub.resource.label": branches}
@@ -116,7 +120,7 @@ class InfrahubEventFilter(EventFilter):
 
         if event_filter:
             event_filter["prefect.resource.role"] = "infrahub.event"
-            self.
+            self.add_related_filter(EventRelatedFilter(labels=ResourceSpecification(event_filter)))
 
     def add_event_id_filter(self, ids: list[str] | None = None) -> None:
         if ids:
@@ -151,7 +155,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_parent_filter(self, parent__ids: list[str] | None) -> None:
         if parent__ids:
-            self.
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.child_event", "infrahub.event_parent.id": parent__ids}
@@ -161,7 +165,7 @@ class InfrahubEventFilter(EventFilter):
 
     def add_related_node_filter(self, related_node__ids: list[str] | None) -> None:
         if related_node__ids:
-            self.
+            self.add_related_filter(
                 EventRelatedFilter(
                     labels=ResourceSpecification(
                         {"prefect.resource.role": "infrahub.related.node", "prefect.resource.id": related_node__ids}
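The new helper normalizes Prefect's `related` field (which may be unset or a single filter) into a list before appending. A standalone sketch of that normalize-then-append pattern, using a stand-in dataclass instead of Prefect's EventRelatedFilter:

from dataclasses import dataclass, field


@dataclass
class RelatedFilter:
    """Stand-in for Prefect's EventRelatedFilter (illustration only)."""

    labels: dict[str, str | list[str]] = field(default_factory=dict)


class Filter:
    """Stand-in for the Prefect EventFilter base class."""

    def __init__(self) -> None:
        self.related: RelatedFilter | list[RelatedFilter] | None = None

    def add_related_filter(self, related: RelatedFilter) -> None:
        # Normalize: a scalar or None value is replaced with a list so we can append.
        if not isinstance(self.related, list):
            self.related = []
        self.related.append(related)


f = Filter()
f.add_related_filter(RelatedFilter(labels={"prefect.resource.role": "infrahub.branch"}))
f.add_related_filter(RelatedFilter(labels={"prefect.resource.role": "infrahub.account"}))
assert isinstance(f.related, list) and len(f.related) == 2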
infrahub/trigger/catalogue.py
CHANGED
@@ -1,3 +1,4 @@
+from infrahub.actions.triggers import TRIGGER_ACTION_RULE_UPDATE
 from infrahub.computed_attribute.triggers import (
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
@@ -6,6 +7,7 @@ from infrahub.trigger.models import TriggerDefinition
 from infrahub.webhook.triggers import TRIGGER_WEBHOOK_DELETE, TRIGGER_WEBHOOK_SETUP_UPDATE
 
 builtin_triggers: list[TriggerDefinition] = [
+    TRIGGER_ACTION_RULE_UPDATE,
     TRIGGER_COMPUTED_ATTRIBUTE_ALL_SCHEMA,
     TRIGGER_COMPUTED_ATTRIBUTE_PYTHON_SETUP_COMMIT,
     TRIGGER_WEBHOOK_DELETE,
infrahub/trigger/models.py
CHANGED
@@ -30,6 +30,7 @@ class TriggerSetupReport(BaseModel):
 
 
 class TriggerType(str, Enum):
+    ACTION_TRIGGER_RULE = "action_trigger_rule"
     BUILTIN = "builtin"
     WEBHOOK = "webhook"
     COMPUTED_ATTR_JINJA2 = "computed_attr_jinja2"
@@ -38,10 +39,15 @@ class TriggerType(str, Enum):
     # OBJECT = "object"
 
 
+def _match_related_dict() -> dict:
+    # Make Mypy happy as match related is a dict[str, Any] | list[dict[str, Any]]
+    return {}
+
+
 class EventTrigger(BaseModel):
     events: set = Field(default_factory=set)
     match: dict[str, Any] = Field(default_factory=dict)
-    match_related: dict[str, Any] = Field(default_factory=
+    match_related: dict[str, Any] | list[dict[str, Any]] = Field(default_factory=_match_related_dict)
 
     def get_prefect(self) -> PrefectEventTrigger:
         return PrefectEventTrigger(
@@ -49,10 +55,20 @@ class EventTrigger(BaseModel):
             expect=self.events,
             within=timedelta(0),
             match=ResourceSpecification(self.match),
-            match_related=
+            match_related=self.related_resource_specification,
             threshold=1,
         )
 
+    @property
+    def related_resource_specification(self) -> ResourceSpecification | list[ResourceSpecification]:
+        if isinstance(self.match_related, dict):
+            return ResourceSpecification(self.match_related)
+
+        if len(self.match_related) == 1:
+            return ResourceSpecification(self.match_related[0])
+
+        return [ResourceSpecification(related_match) for related_match in self.match_related]
+
 
 class ExecuteWorkflow(BaseModel):
     workflow: WorkflowDefinition
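The new `related_resource_specification` property decides whether `match_related` becomes a single ResourceSpecification or a list of them. A minimal standalone sketch of the same branching, with a plain dict subclass standing in for Prefect's ResourceSpecification:

from typing import Any


class Spec(dict):
    """Stand-in for Prefect's ResourceSpecification (illustration only)."""


def related_resource_specification(match_related: dict[str, Any] | list[dict[str, Any]]) -> Spec | list[Spec]:
    # Mirrors the property above: a single dict (or a one-element list) collapses to one
    # specification, while multiple entries are returned as a list of specifications.
    if isinstance(match_related, dict):
        return Spec(match_related)
    if len(match_related) == 1:
        return Spec(match_related[0])
    return [Spec(item) for item in match_related]


print(related_resource_specification({"prefect.resource.role": "infrahub.branch"}))
print(related_resource_specification([{"role": "a"}, {"role": "b"}]))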
infrahub/trigger/tasks.py
CHANGED
@@ -1,6 +1,7 @@
 from prefect import flow
 from prefect.client.orchestration import get_client
 
+from infrahub.actions.gather import gather_trigger_action_rules
 from infrahub.computed_attribute.gather import (
     gather_trigger_computed_attribute_jinja2,
     gather_trigger_computed_attribute_python,
@@ -21,13 +22,14 @@ async def trigger_configure_all(service: InfrahubServices) -> None:
         computed_attribute_python_triggers,
         computed_attribute_python_query_triggers,
     ) = await gather_trigger_computed_attribute_python(db=db)
-
+    action_rules = await gather_trigger_action_rules(db=db)
     triggers = (
         computed_attribute_j2_triggers
         + computed_attribute_python_triggers
         + computed_attribute_python_query_triggers
         + builtin_triggers
         + webhook_trigger
+        + action_rules
     )
 
     async with get_client(sync_client=False) as prefect_client:
infrahub/types.py
CHANGED
@@ -235,6 +235,10 @@ class Number(InfrahubDataType):
     infrahub = "Integer"
 
 
+class NumberPool(Number):
+    label: str = "Number Pool"
+
+
 class Bandwidth(InfrahubDataType):
     label: str = "Bandwidth"
     graphql = graphene.Int
@@ -329,6 +333,7 @@ ATTRIBUTE_TYPES: dict[str, type[InfrahubDataType]] = {
     "MacAddress": MacAddress,
     "Color": Color,
     "Number": Number,
+    "NumberPool": NumberPool,
     "Bandwidth": Bandwidth,
     "IPHost": IPHost,
     "IPNetwork": IPNetwork,
@@ -353,6 +358,7 @@ ATTRIBUTE_PYTHON_TYPES: dict[str, type] = {
     "MacAddress": str,  # MAC addresses can be straightforward strings
     "Color": str,  # Colors often represented as hex strings
     "Number": float,  # Numbers can be floats for general use
+    "NumberPool": float,  # Numbers can be floats for general use
     "Bandwidth": float,  # Bandwidth in some units, represented as a float
     "IPHost": IPvAnyAddress,  # type: ignore[dict-item]
     "IPNetwork": str,
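Registering the new kind in both mappings means a schema attribute declared as `NumberPool` resolves to the same Python type as `Number`. A small illustrative sketch of how such a registry lookup is typically consumed (the helper below is simplified, not the real lookup code):

ATTRIBUTE_PYTHON_TYPES: dict[str, type] = {
    "Number": float,
    "NumberPool": float,  # a NumberPool value behaves like a regular number once allocated
}


def python_type_for(kind: str) -> type:
    # Schema attributes reference kinds by name; unknown kinds should fail loudly.
    try:
        return ATTRIBUTE_PYTHON_TYPES[kind]
    except KeyError as exc:
        raise ValueError(f"Unsupported attribute kind: {kind}") from exc


assert python_type_for("NumberPool") is float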
infrahub/workflows/catalogue.py
CHANGED
@@ -6,6 +6,35 @@ from .models import WorkerPoolDefinition, WorkflowDefinition
 INFRAHUB_WORKER_POOL = WorkerPoolDefinition(name="infrahub-worker", description="Default Pool for internal tasks")
 
 
+ACTION_ADD_NODE_TO_GROUP = WorkflowDefinition(
+    name="action-add-node-to-group",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="add_node_to_group",
+)
+
+ACTION_RUN_GENERATOR = WorkflowDefinition(
+    name="action-run-generator",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="run_generator",
+)
+
+ACTION_RUN_GENERATOR_GROUP_EVENT = WorkflowDefinition(
+    name="action-run-generator-group-event",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="run_generator_group_event",
+)
+
+
+CONFIGURE_ACTION_RULES = WorkflowDefinition(
+    name="configure-action-rules",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="configure_action_rules",
+)
+
 TRANSFORM_JINJA2_RENDER = WorkflowDefinition(
     name="transform_render_jinja2_template",
     type=WorkflowType.USER,
@@ -66,6 +95,13 @@ IPAM_RECONCILIATION = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REMOVE_ADD_NODE_FROM_GROUP = WorkflowDefinition(
+    name="action-remove-node-from-group",
+    type=WorkflowType.CORE,
+    module="infrahub.actions.tasks",
+    function="remove_node_from_group",
+)
+
 REQUEST_GENERATOR_RUN = WorkflowDefinition(
     name="generator-run",
     type=WorkflowType.USER,
@@ -74,6 +110,14 @@ REQUEST_GENERATOR_RUN = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+RUN_GENERATOR_AS_CHECK = WorkflowDefinition(
+    name="run-generator-as-check",
+    type=WorkflowType.USER,
+    module="infrahub.proposed_change.tasks",
+    function="run_generator_as_check",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 REQUEST_GENERATOR_DEFINITION_RUN = WorkflowDefinition(
     name="request-generator-definition-run",
     type=WorkflowType.CORE,
@@ -82,6 +126,14 @@ REQUEST_GENERATOR_DEFINITION_RUN = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REQUEST_GENERATOR_DEFINITION_CHECK = WorkflowDefinition(
+    name="request-generator-definition-check",
+    type=WorkflowType.CORE,
+    module="infrahub.proposed_change.tasks",
+    function="request_generator_definition_check",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
 REQUEST_ARTIFACT_GENERATE = WorkflowDefinition(
     name="artifact-generate",
     type=WorkflowType.CORE,  # NOTE need to check
@@ -338,6 +390,21 @@ GIT_REPOSITORIES_IMPORT_OBJECTS = WorkflowDefinition(
     tags=[WorkflowTag.DATABASE_CHANGE],
 )
 
+REQUEST_PROPOSED_CHANGE_PIPELINE = WorkflowDefinition(
+    name="proposed-changed-pipeline",
+    type=WorkflowType.INTERNAL,
+    module="infrahub.proposed_change.tasks",
+    function="run_proposed_change_pipeline",
+    tags=[WorkflowTag.DATABASE_CHANGE],
+)
+
+REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS = WorkflowDefinition(
+    name="proposed-changed-refresh-artifacts",
+    type=WorkflowType.INTERNAL,
+    module="infrahub.proposed_change.tasks",
+    function="refresh_artifacts",
+)
+
 REQUEST_PROPOSED_CHANGE_RUN_GENERATORS = WorkflowDefinition(
     name="proposed-changed-run-generator",
     type=WorkflowType.INTERNAL,
@@ -442,6 +509,9 @@ TRIGGER_CONFIGURE_ALL = WorkflowDefinition(
 worker_pools = [INFRAHUB_WORKER_POOL]
 
 workflows = [
+    ACTION_ADD_NODE_TO_GROUP,
+    ACTION_RUN_GENERATOR,
+    ACTION_RUN_GENERATOR_GROUP_EVENT,
     ANONYMOUS_TELEMETRY_SEND,
     BRANCH_CANCEL_PROPOSED_CHANGES,
     BRANCH_CREATE,
@@ -456,6 +526,7 @@ workflows = [
     COMPUTED_ATTRIBUTE_PROCESS_TRANSFORM,
     COMPUTED_ATTRIBUTE_SETUP_JINJA2,
     COMPUTED_ATTRIBUTE_SETUP_PYTHON,
+    CONFIGURE_ACTION_RULES,
     DIFF_REFRESH,
     DIFF_REFRESH_ALL,
     DIFF_UPDATE,
@@ -477,16 +548,21 @@ workflows = [
     IPAM_RECONCILIATION,
     PROPOSED_CHANGE_MERGE,
     QUERY_COMPUTED_ATTRIBUTE_TRANSFORM_TARGETS,
+    REMOVE_ADD_NODE_FROM_GROUP,
     REQUEST_ARTIFACT_DEFINITION_CHECK,
     REQUEST_ARTIFACT_DEFINITION_GENERATE,
     REQUEST_ARTIFACT_GENERATE,
+    REQUEST_GENERATOR_DEFINITION_CHECK,
     REQUEST_GENERATOR_DEFINITION_RUN,
     REQUEST_GENERATOR_RUN,
     REQUEST_PROPOSED_CHANGE_DATA_INTEGRITY,
+    REQUEST_PROPOSED_CHANGE_PIPELINE,
+    REQUEST_PROPOSED_CHANGE_REFRESH_ARTIFACTS,
     REQUEST_PROPOSED_CHANGE_REPOSITORY_CHECKS,
     REQUEST_PROPOSED_CHANGE_RUN_GENERATORS,
     REQUEST_PROPOSED_CHANGE_SCHEMA_INTEGRITY,
     REQUEST_PROPOSED_CHANGE_USER_TESTS,
+    RUN_GENERATOR_AS_CHECK,
     SCHEMA_APPLY_MIGRATION,
     SCHEMA_VALIDATE_MIGRATION,
     TRANSFORM_JINJA2_RENDER,
infrahub_sdk/client.py
CHANGED
@@ -172,11 +172,18 @@ class BaseClient:
         params: dict[str, Any] | None = None,
         delete_unused_nodes: bool = False,
         group_type: str | None = None,
+        group_params: dict[str, Any] | None = None,
+        branch: str | None = None,
     ) -> Self:
         self.mode = InfrahubClientMode.TRACKING
         identifier = identifier or self.identifier or "python-sdk"
         self.set_context_properties(
-            identifier=identifier,
+            identifier=identifier,
+            params=params,
+            delete_unused_nodes=delete_unused_nodes,
+            group_type=group_type,
+            group_params=group_params,
+            branch=branch,
         )
         return self
 
@@ -187,14 +194,22 @@ class BaseClient:
         delete_unused_nodes: bool = True,
         reset: bool = True,
         group_type: str | None = None,
+        group_params: dict[str, Any] | None = None,
+        branch: str | None = None,
     ) -> None:
         if reset:
             if isinstance(self, InfrahubClient):
                 self.group_context = InfrahubGroupContext(self)
             elif isinstance(self, InfrahubClientSync):
                 self.group_context = InfrahubGroupContextSync(self)
+
         self.group_context.set_properties(
-            identifier=identifier,
+            identifier=identifier,
+            params=params,
+            delete_unused_nodes=delete_unused_nodes,
+            group_type=group_type,
+            group_params=group_params,
+            branch=branch,
         )
 
     def _graphql_url(
@@ -562,18 +577,27 @@ class InfrahubClient(BaseClient):
         at: Timestamp | None = None,
         branch: str | None = None,
         timeout: int | None = None,
+        partial_match: bool = False,
         **kwargs: Any,
     ) -> int:
         """Return the number of nodes of a given kind."""
-        filters = kwargs
-        schema = await self.schema.get(kind=kind, branch=branch)
+        filters: dict[str, Any] = dict(kwargs)
 
+        if partial_match:
+            filters["partial_match"] = True
+
+        schema = await self.schema.get(kind=kind, branch=branch)
         branch = branch or self.default_branch
         if at:
             at = Timestamp(at)
 
+        data: dict[str, Any] = {
+            "count": None,
+            "@filters": filters,
+        }
+
         response = await self.execute_graphql(
-            query=Query(query={schema.kind:
+            query=Query(query={schema.kind: data}).render(),
             branch_name=branch,
             at=at,
             timeout=timeout,
@@ -801,7 +825,7 @@
         nodes = []
         related_nodes = []
         batch_process = await self.create_batch()
-        count = await self.count(kind=schema.kind, **filters)
+        count = await self.count(kind=schema.kind, partial_match=partial_match, **filters)
         total_pages = (count + pagination_size - 1) // pagination_size
 
         for page_number in range(1, total_pages + 1):
@@ -1683,18 +1707,27 @@ class InfrahubClientSync(BaseClient):
         at: Timestamp | None = None,
         branch: str | None = None,
         timeout: int | None = None,
+        partial_match: bool = False,
         **kwargs: Any,
     ) -> int:
         """Return the number of nodes of a given kind."""
-        filters = kwargs
-
+        filters: dict[str, Any] = dict(kwargs)
+
+        if partial_match:
+            filters["partial_match"] = True
 
+        schema = self.schema.get(kind=kind, branch=branch)
         branch = branch or self.default_branch
         if at:
             at = Timestamp(at)
 
+        data: dict[str, Any] = {
+            "count": None,
+            "@filters": filters,
+        }
+
         response = self.execute_graphql(
-            query=Query(query={schema.kind:
+            query=Query(query={schema.kind: data}).render(),
             branch_name=branch,
             at=at,
             timeout=timeout,
@@ -1957,7 +1990,7 @@
         related_nodes = []
         batch_process = self.create_batch()
 
-        count = self.count(kind=schema.kind, **filters)
+        count = self.count(kind=schema.kind, partial_match=partial_match, **filters)
         total_pages = (count + pagination_size - 1) // pagination_size
 
         for page_number in range(1, total_pages + 1):
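A hypothetical use of the new `partial_match` flag on `count()`; the node kind, the filter, and the server address below are placeholders, not taken from the diff:

import asyncio

from infrahub_sdk import InfrahubClient


async def main() -> None:
    client = InfrahubClient(address="http://localhost:8000")  # placeholder address
    # With partial_match=True the string filters match partially instead of exactly.
    total = await client.count(kind="InfraDevice", partial_match=True, name__value="leaf")
    print(total)


asyncio.run(main())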
infrahub_sdk/node/__init__.py
ADDED
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from .constants import (
+    ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE,
+    ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE,
+    ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE,
+    HFID_STR_SEPARATOR,
+    IP_TYPES,
+    PROPERTIES_FLAG,
+    PROPERTIES_OBJECT,
+    SAFE_VALUE,
+)
+from .node import InfrahubNode, InfrahubNodeBase, InfrahubNodeSync
+from .parsers import parse_human_friendly_id
+from .property import NodeProperty
+from .related_node import RelatedNode, RelatedNodeBase, RelatedNodeSync
+from .relationship import RelationshipManager, RelationshipManagerBase, RelationshipManagerSync
+
+__all__ = [
+    "ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE",
+    "ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE",
+    "ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE",
+    "HFID_STR_SEPARATOR",
+    "IP_TYPES",
+    "PROPERTIES_FLAG",
+    "PROPERTIES_OBJECT",
+    "SAFE_VALUE",
+    "InfrahubNode",
+    "InfrahubNodeBase",
+    "InfrahubNodeSync",
+    "NodeProperty",
+    "RelatedNode",
+    "RelatedNodeBase",
+    "RelatedNodeSync",
+    "RelationshipManager",
+    "RelationshipManagerBase",
+    "RelationshipManagerSync",
+    "parse_human_friendly_id",
+]
infrahub_sdk/node/attribute.py
ADDED
@@ -0,0 +1,122 @@
+from __future__ import annotations
+
+import ipaddress
+from typing import TYPE_CHECKING, Any, Callable, get_args
+
+from ..protocols_base import CoreNodeBase
+from ..uuidt import UUIDT
+from .constants import IP_TYPES, PROPERTIES_FLAG, PROPERTIES_OBJECT, SAFE_VALUE
+from .property import NodeProperty
+
+if TYPE_CHECKING:
+    from ..schema import AttributeSchemaAPI
+
+
+class Attribute:
+    """Represents an attribute of a Node, including its schema, value, and properties."""
+
+    def __init__(self, name: str, schema: AttributeSchemaAPI, data: Any | dict):
+        """
+        Args:
+            name (str): The name of the attribute.
+            schema (AttributeSchema): The schema defining the attribute.
+            data (Union[Any, dict]): The data for the attribute, either in raw form or as a dictionary.
+        """
+        self.name = name
+        self._schema = schema
+
+        if not isinstance(data, dict) or "value" not in data.keys():
+            data = {"value": data}
+
+        self._properties_flag = PROPERTIES_FLAG
+        self._properties_object = PROPERTIES_OBJECT
+        self._properties = self._properties_flag + self._properties_object
+
+        self._read_only = ["updated_at", "is_inherited"]
+
+        self.id: str | None = data.get("id", None)
+
+        self._value: Any | None = data.get("value", None)
+        self.value_has_been_mutated = False
+        self.is_default: bool | None = data.get("is_default", None)
+        self.is_from_profile: bool | None = data.get("is_from_profile", None)
+
+        if self._value:
+            value_mapper: dict[str, Callable] = {
+                "IPHost": ipaddress.ip_interface,
+                "IPNetwork": ipaddress.ip_network,
+            }
+            mapper = value_mapper.get(schema.kind, lambda value: value)
+            self._value = mapper(data.get("value"))
+
+        self.is_inherited: bool | None = data.get("is_inherited", None)
+        self.updated_at: str | None = data.get("updated_at", None)
+
+        self.is_visible: bool | None = data.get("is_visible", None)
+        self.is_protected: bool | None = data.get("is_protected", None)
+
+        self.source: NodeProperty | None = None
+        self.owner: NodeProperty | None = None
+
+        for prop_name in self._properties_object:
+            if data.get(prop_name):
+                setattr(self, prop_name, NodeProperty(data=data.get(prop_name)))  # type: ignore[arg-type]
+
+    @property
+    def value(self) -> Any:
+        return self._value
+
+    @value.setter
+    def value(self, value: Any) -> None:
+        self._value = value
+        self.value_has_been_mutated = True
+
+    def _generate_input_data(self) -> dict | None:
+        data: dict[str, Any] = {}
+        variables: dict[str, Any] = {}
+
+        if self.value is None:
+            return data
+
+        if isinstance(self.value, str):
+            if SAFE_VALUE.match(self.value):
+                data["value"] = self.value
+            else:
+                var_name = f"value_{UUIDT.new().hex}"
+                variables[var_name] = self.value
+                data["value"] = f"${var_name}"
+        elif isinstance(self.value, get_args(IP_TYPES)):
+            data["value"] = self.value.with_prefixlen
+        elif isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool():
+            data["from_pool"] = {"id": self.value.id}
+        else:
+            data["value"] = self.value
+
+        for prop_name in self._properties_flag:
+            if getattr(self, prop_name) is not None:
+                data[prop_name] = getattr(self, prop_name)
+
+        for prop_name in self._properties_object:
+            if getattr(self, prop_name) is not None:
+                data[prop_name] = getattr(self, prop_name)._generate_input_data()
+
+        return {"data": data, "variables": variables}
+
+    def _generate_query_data(self, property: bool = False) -> dict | None:
+        data: dict[str, Any] = {"value": None}
+
+        if property:
+            data.update({"is_default": None, "is_from_profile": None})
+
+        for prop_name in self._properties_flag:
+            data[prop_name] = None
+        for prop_name in self._properties_object:
+            data[prop_name] = {"id": None, "display_label": None, "__typename": None}
+
+        return data
+
+    def _generate_mutation_query(self) -> dict[str, Any]:
+        if isinstance(self.value, CoreNodeBase) and self.value.is_resource_pool():
+            # If it points to a pool, ask for the value of the pool allocated resource
+            return {self.name: {"value": None}}
+        return {}
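For context, a hypothetical end-to-end use of the Attribute wrapper through an SDK node; the kind `BuiltinTag` and the address below are placeholders:

import asyncio

from infrahub_sdk import InfrahubClient


async def main() -> None:
    client = InfrahubClient(address="http://localhost:8000")  # placeholder address
    tag = await client.create(kind="BuiltinTag", name="blue")
    # tag.name is an Attribute: assigning .value flags it as mutated so that
    # _generate_input_data() includes the new value in the save mutation.
    tag.name.value = "dark-blue"
    await tag.save()


asyncio.run(main())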
infrahub_sdk/node/constants.py
ADDED
@@ -0,0 +1,21 @@
+import ipaddress
+import re
+from typing import Union
+
+PROPERTIES_FLAG = ["is_visible", "is_protected"]
+PROPERTIES_OBJECT = ["source", "owner"]
+SAFE_VALUE = re.compile(r"(^[\. /:a-zA-Z0-9_-]+$)|(^$)")
+
+IP_TYPES = Union[ipaddress.IPv4Interface, ipaddress.IPv6Interface, ipaddress.IPv4Network, ipaddress.IPv6Network]
+
+ARTIFACT_FETCH_FEATURE_NOT_SUPPORTED_MESSAGE = (
+    "calling artifact_fetch is only supported for nodes that are Artifact Definition target"
+)
+ARTIFACT_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = (
+    "calling artifact_generate is only supported for nodes that are Artifact Definition targets"
+)
+ARTIFACT_DEFINITION_GENERATE_FEATURE_NOT_SUPPORTED_MESSAGE = (
+    "calling generate is only supported for CoreArtifactDefinition nodes"
+)
+
+HFID_STR_SEPARATOR = "__"