osism 0.20250602.0__tar.gz → 0.20250605.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- osism-0.20250605.0/ChangeLog +7 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/Containerfile +1 -1
- {osism-0.20250602.0 → osism-0.20250605.0}/Dockerfile +1 -1
- {osism-0.20250602.0/osism.egg-info → osism-0.20250605.0}/PKG-INFO +2 -2
- {osism-0.20250602.0 → osism-0.20250605.0}/Pipfile +1 -1
- {osism-0.20250602.0 → osism-0.20250605.0}/Pipfile.lock +16 -16
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/baremetal.py +36 -30
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/vault.py +9 -1
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/core/enums.py +1 -0
- osism-0.20250605.0/osism/tasks/conductor/__init__.py +54 -0
- osism-0.20250605.0/osism/tasks/conductor/config.py +92 -0
- osism-0.20250602.0/osism/tasks/conductor.py → osism-0.20250605.0/osism/tasks/conductor/ironic.py +66 -215
- osism-0.20250605.0/osism/tasks/conductor/netbox.py +50 -0
- osism-0.20250605.0/osism/tasks/conductor/utils.py +79 -0
- osism-0.20250605.0/osism/tasks/conductor.py +15 -0
- {osism-0.20250602.0 → osism-0.20250605.0/osism.egg-info}/PKG-INFO +2 -2
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/SOURCES.txt +5 -0
- osism-0.20250605.0/osism.egg-info/pbr.json +1 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/requires.txt +1 -1
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.txt +1 -1
- osism-0.20250602.0/ChangeLog +0 -7
- osism-0.20250602.0/osism.egg-info/pbr.json +0 -1
- {osism-0.20250602.0 → osism-0.20250605.0}/.flake8 +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/.github/renovate.json +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/.github/workflows/publish.yml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/.hadolint.yaml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/.zuul.yaml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/AUTHORS +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/LICENSE +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/README.md +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/change.sh +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/cleanup-ansible-collections.sh +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/clustershell/clush.conf +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/clustershell/groups.conf +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/data/SCS-Spec.MandatoryFlavors.verbose.yaml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/netbox-manager/settings.toml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/files/run-ansible-console.sh +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/__main__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/actions/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/api.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/apply.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/compose.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/compute.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/configuration.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/console.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/container.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/get.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/log.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/manage.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/netbox.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/noset.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/reconciler.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/server.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/service.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/set.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/status.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/sync.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/task.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/validate.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/volume.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/wait.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/worker.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/core/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/core/playbooks.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/data/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/main.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/plugins/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/services/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/services/listener.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/settings.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/ansible.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/ceph.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/kolla.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/kubernetes.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/netbox.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/openstack.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/tasks/reconciler.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism/utils/__init__.py +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/dependency_links.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/entry_points.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/not-zip-safe +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/top_level.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/playbooks/build.yml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/playbooks/pre.yml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/playbooks/test-setup.yml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.ansible.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.netbox-manager.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.openstack-flavor-manager.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.openstack-image-manager.txt +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/requirements.yml +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/setup.cfg +0 -0
- {osism-0.20250602.0 → osism-0.20250605.0}/setup.py +0 -0
{osism-0.20250602.0 → osism-0.20250605.0}/Containerfile

@@ -7,7 +7,7 @@ FROM ${IMAGE}:${PYTHON_VERSION}-alpine${ALPINE_VERSION}
 ENV PYTHONWARNINGS="ignore::UserWarning"
 
 COPY . /src
-COPY --from=ghcr.io/astral-sh/uv:0.7.
+COPY --from=ghcr.io/astral-sh/uv:0.7.11 /uv /usr/local/bin/uv
 
 COPY files/data /data
 COPY files/change.sh /change.sh

{osism-0.20250602.0 → osism-0.20250605.0}/Dockerfile

@@ -7,7 +7,7 @@ FROM ${IMAGE}:${PYTHON_VERSION}-alpine${ALPINE_VERSION}
 ENV PYTHONWARNINGS="ignore::UserWarning"
 
 COPY . /src
-COPY --from=ghcr.io/astral-sh/uv:0.7.
+COPY --from=ghcr.io/astral-sh/uv:0.7.11 /uv /usr/local/bin/uv
 
 COPY files/data /data
 COPY files/change.sh /change.sh
{osism-0.20250602.0/osism.egg-info → osism-0.20250605.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: osism
-Version: 0.
+Version: 0.20250605.0
 Summary: OSISM manager interface
 Home-page: https://github.com/osism/python-osism
 Author: OSISM GmbH
@@ -41,7 +41,7 @@ Requires-Dist: kubernetes==32.0.1
 Requires-Dist: loguru==0.7.3
 Requires-Dist: nbcli==0.10.0.dev2
 Requires-Dist: netmiko==4.5.0
-Requires-Dist: openstacksdk==4.
+Requires-Dist: openstacksdk==4.6.0
 Requires-Dist: pottery==3.0.1
 Requires-Dist: prompt-toolkit==3.0.51
 Requires-Dist: pynetbox==7.5.0
{osism-0.20250602.0 → osism-0.20250605.0}/Pipfile.lock

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "
+            "sha256": "2c0211f42b6765854a69f4900797638c04652c2852298fb20cf1ec26ae0fdf5e"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -1019,12 +1019,12 @@
         },
         "openstacksdk": {
             "hashes": [
-                "sha256:
-                "sha256:
+                "sha256:0ea54ce3005d48c5134f77dce8df7dd6b4c52d2a103472abc99db19cd4382638",
+                "sha256:e47e166c4732e9aea65228e618d490e4be5df06526a1b95e2d5995d7d0977d3d"
             ],
             "index": "pypi",
-            "markers": "python_version >= '3.
-            "version": "==4.
+            "markers": "python_version >= '3.10'",
+            "version": "==4.6.0"
         },
         "orderly-set": {
             "hashes": [
@@ -1113,11 +1113,11 @@
         },
         "prometheus-client": {
             "hashes": [
-                "sha256:
-                "sha256:
+                "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28",
+                "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094"
             ],
             "markers": "python_version >= '3.9'",
-            "version": "==0.22.
+            "version": "==0.22.1"
         },
         "prompt-toolkit": {
             "hashes": [
@@ -1335,11 +1335,11 @@
         },
         "pytest": {
             "hashes": [
-                "sha256:
-                "sha256:
+                "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6",
+                "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e"
             ],
-            "markers": "python_version >= '3.
-            "version": "==8.
+            "markers": "python_version >= '3.9'",
+            "version": "==8.4.0"
         },
         "pytest-testinfra": {
             "hashes": [
@@ -1697,11 +1697,11 @@
         },
         "typing-extensions": {
             "hashes": [
-                "sha256:
-                "sha256:
+                "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4",
+                "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"
             ],
-            "markers": "python_version >= '3.
-            "version": "==4.
+            "markers": "python_version >= '3.9'",
+            "version": "==4.14.0"
         },
         "typing-inspection": {
             "hashes": [
{osism-0.20250602.0 → osism-0.20250605.0}/osism/commands/baremetal.py

@@ -70,19 +70,18 @@ class BaremetalDeploy(Command):
     def get_parser(self, prog_name):
         parser = super(BaremetalDeploy, self).get_parser(prog_name)
 
-
-
+        parser.add_argument(
+            "name",
+            nargs="?",
+            type=str,
+            help="Deploy given baremetal node when in provision state available",
+        )
+        parser.add_argument(
             "--all",
             default=False,
             help="Deploy all baremetal nodes in provision state available",
             action="store_true",
         )
-        parser_exc_group.add_argument(
-            "--name",
-            default=[],
-            help="Deploy given baremetal node when in provision state available. May be specified multiple times",
-            action="append",
-        )
         parser.add_argument(
             "--rebuild",
             default=False,
@@ -99,10 +98,14 @@ class BaremetalDeploy(Command):
 
     def take_action(self, parsed_args):
         all_nodes = parsed_args.all
-
+        name = parsed_args.name
         rebuild = parsed_args.rebuild
         yes_i_really_really_mean_it = parsed_args.yes_i_really_really_mean_it
 
+        if not all_nodes and not name:
+            logger.error("Please specify a node name or use --all")
+            return
+
         if all_nodes and rebuild and not yes_i_really_really_mean_it:
             logger.error(
                 "Please confirm that you wish to rebuild all nodes by specifying '--yes-i-really-really-mean-it'"
@@ -114,14 +117,14 @@ class BaremetalDeploy(Command):
         if all_nodes:
             deploy_nodes = list(conn.baremetal.nodes(details=True))
         else:
-
-
-
-
+            node = conn.baremetal.find_node(name, ignore_missing=True, details=True)
+            if not node:
+                logger.warning(f"Could not find node {name}")
+                return
+            deploy_nodes = [node]
 
-        for
+        for node in deploy_nodes:
             if not node:
-                logger.warning(f"Could not find node {names[node_idx]}")
                 continue
 
             if node.provision_state in ["available", "deploy failed"]:
@@ -176,19 +179,18 @@ class BaremetalUndeploy(Command):
     def get_parser(self, prog_name):
         parser = super(BaremetalUndeploy, self).get_parser(prog_name)
 
-
-
+        parser.add_argument(
+            "name",
+            nargs="?",
+            type=str,
+            help="Undeploy given baremetal node",
+        )
+        parser.add_argument(
             "--all",
             default=False,
             help="Undeploy all baremetal nodes",
             action="store_true",
         )
-        parser_exc_group.add_argument(
-            "--name",
-            default=[],
-            help="Undeploy given baremetal node. May be specified multiple times",
-            action="append",
-        )
         parser.add_argument(
             "--yes-i-really-really-mean-it",
             default=False,
@@ -199,9 +201,13 @@ class BaremetalUndeploy(Command):
 
     def take_action(self, parsed_args):
         all_nodes = parsed_args.all
-
+        name = parsed_args.name
         yes_i_really_really_mean_it = parsed_args.yes_i_really_really_mean_it
 
+        if not all_nodes and not name:
+            logger.error("Please specify a node name or use --all")
+            return
+
         if all_nodes and not yes_i_really_really_mean_it:
             logger.error(
                 "Please confirm that you wish to undeploy all nodes by specifying '--yes-i-really-really-mean-it'"
@@ -213,14 +219,14 @@ class BaremetalUndeploy(Command):
         if all_nodes:
             deploy_nodes = list(conn.baremetal.nodes())
         else:
-
-
-
-
+            node = conn.baremetal.find_node(name, ignore_missing=True, details=False)
+            if not node:
+                logger.warning(f"Could not find node {name}")
+                return
+            deploy_nodes = [node]
 
-        for
+        for node in deploy_nodes:
             if not node:
-                logger.warning(f"Could not find node {names[node_idx]}")
                 continue
 
             if node.provision_state in ["active", "deploy failed", "error"]:
|
|
5
5
|
|
6
6
|
import os
|
7
7
|
import subprocess
|
8
|
+
import sys
|
8
9
|
|
9
10
|
from cliff.command import Command
|
10
11
|
from cryptography.fernet import Fernet
|
@@ -31,7 +32,14 @@ class SetPassword(Command):
|
|
31
32
|
|
32
33
|
f = Fernet(key)
|
33
34
|
|
34
|
-
|
35
|
+
# Check if password is being piped from STDIN
|
36
|
+
if not sys.stdin.isatty():
|
37
|
+
ansible_vault_password = sys.stdin.read().strip()
|
38
|
+
else:
|
39
|
+
ansible_vault_password = prompt(
|
40
|
+
"Ansible Vault password: ", is_password=True
|
41
|
+
)
|
42
|
+
|
35
43
|
redis.set(
|
36
44
|
"ansible_vault_password", f.encrypt(ansible_vault_password.encode("utf-8"))
|
37
45
|
)
|
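
The vault.py change above lets the Ansible Vault password be piped in instead of always prompting. A minimal standalone sketch of the same stdin detection (getpass stands in here for prompt_toolkit's prompt(..., is_password=True)):

import sys
from getpass import getpass

# Read the password from a pipe when stdin is not a TTY, otherwise prompt interactively.
if not sys.stdin.isatty():
    password = sys.stdin.read().strip()
else:
    password = getpass("Ansible Vault password: ")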
{osism-0.20250602.0 → osism-0.20250605.0}/osism/core/enums.py

@@ -102,6 +102,7 @@ VALIDATE_PLAYBOOKS = {
     "ceph-osds": {"environment": "ceph", "runtime": "osism-ansible"},
     "container-status": {"environment": "generic", "runtime": "osism-ansible"},
     "kernel-version": {"environment": "generic", "runtime": "osism-ansible"},
+    "docker-version": {"environment": "generic", "runtime": "osism-ansible"},
     "kolla-connectivity": {"environment": "kolla", "runtime": "osism-ansible"},
     "mysql-open-files-limit": {"environment": "generic", "runtime": "osism-ansible"},
     "ntp": {"environment": "generic", "runtime": "osism-ansible"},
osism-0.20250605.0/osism/tasks/conductor/__init__.py
ADDED

@@ -0,0 +1,54 @@
+# SPDX-License-Identifier: Apache-2.0
+
+import copy
+from celery import Celery
+from celery.signals import worker_process_init
+from loguru import logger
+
+from osism.tasks import Config
+from osism.tasks.conductor.config import get_configuration
+from osism.tasks.conductor.ironic import sync_ironic as _sync_ironic
+
+
+# App configuration
+app = Celery("conductor")
+app.config_from_object(Config)
+
+
+@worker_process_init.connect
+def celery_init_worker(**kwargs):
+    pass
+
+
+@app.on_after_configure.connect
+def setup_periodic_tasks(sender, **kwargs):
+    pass
+
+
+# Tasks
+@app.task(bind=True, name="osism.tasks.conductor.get_ironic_parameters")
+def get_ironic_parameters(self):
+    configuration = get_configuration()
+    if "ironic_parameters" in configuration:
+        # NOTE: Do not pass by reference, everybody gets their own copy to work with
+        return copy.deepcopy(configuration["ironic_parameters"])
+
+    return {}
+
+
+@app.task(bind=True, name="osism.tasks.conductor.sync_netbox")
+def sync_netbox(self, force_update=False):
+    logger.info("Not implemented")
+
+
+@app.task(bind=True, name="osism.tasks.conductor.sync_ironic")
+def sync_ironic(self, force_update=False):
+    _sync_ironic(get_ironic_parameters, force_update)
+
+
+__all__ = [
+    "app",
+    "get_ironic_parameters",
+    "sync_netbox",
+    "sync_ironic",
+]
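
The new conductor package keeps the Celery application and the task registrations in __init__.py, so the task names stay osism.tasks.conductor.*. A hedged sketch of enqueueing them by name from another process (the broker URL is a placeholder; the real settings come from osism.tasks.Config):

from celery import Celery

app = Celery("conductor", broker="redis://localhost:6379/0")  # placeholder broker URL

# Dispatch by the task names registered above.
app.send_task("osism.tasks.conductor.sync_ironic", kwargs={"force_update": False})
app.send_task("osism.tasks.conductor.get_ironic_parameters")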
osism-0.20250605.0/osism/tasks/conductor/config.py
ADDED

@@ -0,0 +1,92 @@
+# SPDX-License-Identifier: Apache-2.0
+
+import uuid
+
+from loguru import logger
+import yaml
+
+from osism.tasks import Config, openstack
+
+
+def is_uuid(value):
+    """Check if a string is a valid UUID."""
+    try:
+        uuid.UUID(value)
+        return True
+    except (ValueError, AttributeError):
+        return False
+
+
+def get_configuration():
+    with open("/etc/conductor.yml") as fp:
+        configuration = yaml.load(fp, Loader=yaml.SafeLoader)
+
+    if not configuration:
+        logger.warning(
+            "The conductor configuration is empty. That's probably wrong"
+        )
+        return {}
+
+    if Config.enable_ironic.lower() not in ["true", "yes"]:
+        return configuration
+
+    if "ironic_parameters" not in configuration:
+        logger.error("ironic_parameters not found in the conductor configuration")
+        return configuration
+
+    if "instance_info" in configuration["ironic_parameters"]:
+        if "image_source" in configuration["ironic_parameters"]["instance_info"]:
+            image_source = configuration["ironic_parameters"]["instance_info"][
+                "image_source"
+            ]
+            if not is_uuid(image_source):
+                result = openstack.image_get(image_source)
+                configuration["ironic_parameters"]["instance_info"][
+                    "image_source"
+                ] = result.id
+
+    if "driver_info" in configuration["ironic_parameters"]:
+        if "deploy_kernel" in configuration["ironic_parameters"]["driver_info"]:
+            deploy_kernel = configuration["ironic_parameters"]["driver_info"][
+                "deploy_kernel"
+            ]
+            if not is_uuid(deploy_kernel):
+                result = openstack.image_get(deploy_kernel)
+                configuration["ironic_parameters"]["driver_info"][
+                    "deploy_kernel"
+                ] = result.id
+
+        if "deploy_ramdisk" in configuration["ironic_parameters"]["driver_info"]:
+            deploy_ramdisk = configuration["ironic_parameters"]["driver_info"][
+                "deploy_ramdisk"
+            ]
+            if not is_uuid(deploy_ramdisk):
+                result = openstack.image_get(deploy_ramdisk)
+                configuration["ironic_parameters"]["driver_info"][
+                    "deploy_ramdisk"
+                ] = result.id
+
+        if "cleaning_network" in configuration["ironic_parameters"]["driver_info"]:
+            result = openstack.network_get(
+                configuration["ironic_parameters"]["driver_info"][
+                    "cleaning_network"
+                ]
+            )
+            configuration["ironic_parameters"]["driver_info"][
+                "cleaning_network"
+            ] = result.id
+
+        if (
+            "provisioning_network"
+            in configuration["ironic_parameters"]["driver_info"]
+        ):
+            result = openstack.network_get(
+                configuration["ironic_parameters"]["driver_info"][
+                    "provisioning_network"
+                ]
+            )
+            configuration["ironic_parameters"]["driver_info"][
+                "provisioning_network"
+            ] = result.id
+
+    return configuration
osism-0.20250602.0/osism/tasks/conductor.py → osism-0.20250605.0/osism/tasks/conductor/ironic.py
RENAMED

@@ -1,210 +1,39 @@
 # SPDX-License-Identifier: Apache-2.0
 
-from ansible import constants as ansible_constants
-from ansible.parsing.vault import VaultLib, VaultSecret
-from celery import Celery
-from celery.signals import worker_process_init
-import copy
 import ipaddress
+import json
+
 import jinja2
 from loguru import logger
 from pottery import Redlock
-import yaml
-import json
-
-from osism import settings
-from osism import utils
-from osism.tasks import Config, netbox, openstack
-
-app = Celery("conductor")
-app.config_from_object(Config)
-
-
-configuration = {}
-
-
-def get_nb_device_query_list():
-    try:
-        supported_nb_device_filters = [
-            "site",
-            "region",
-            "site_group",
-            "location",
-            "rack",
-            "tag",
-            "state",
-        ]
-        nb_device_query_list = yaml.safe_load(settings.NETBOX_FILTER_CONDUCTOR)
-        if type(nb_device_query_list) is not list:
-            raise TypeError
-        for nb_device_query in nb_device_query_list:
-            if type(nb_device_query) is not dict:
-                raise TypeError
-            for key in list(nb_device_query.keys()):
-                if key not in supported_nb_device_filters:
-                    raise ValueError
-                # NOTE: Only "location_id" and "rack_id" are supported by netbox
-                if key in ["location", "rack"]:
-                    value_name = nb_device_query.pop(key, "")
-                    if key == "location":
-                        value_id = netbox.get_location_id(value_name)
-                    elif key == "rack":
-                        value_id = netbox.get_rack_id(value_name)
-                    if value_id:
-                        nb_device_query.update({key + "_id": value_id})
-                    else:
-                        raise ValueError(f"Invalid name {value_name} for {key}")
-    except (yaml.YAMLError, TypeError):
-        logger.error(
-            f"Setting NETBOX_FILTER_CONDUCTOR needs to be an array of mappings containing supported netbox device filters: {supported_nb_device_filters}"
-        )
-        nb_device_query_list = []
-    except ValueError as exc:
-        logger.error(f"Unknown value in NETBOX_FILTER_CONDUCTOR: {exc}")
-        nb_device_query_list = []
-
-    return nb_device_query_list
-
-
-def get_configuration():
-    with open("/etc/conductor.yml") as fp:
-        configuration = yaml.load(fp, Loader=yaml.SafeLoader)
-
-    if not configuration:
-        logger.warning(
-            "The conductor configuration is empty. That's probably wrong"
-        )
-        return {}
-
-    if Config.enable_ironic.lower() not in ["true", "yes"]:
-        return configuration
-
-    if "ironic_parameters" not in configuration:
-        logger.error("ironic_parameters not found in the conductor configuration")
-        return configuration
-
-    if "driver_info" in configuration["ironic_parameters"]:
-        if "deploy_kernel" in configuration["ironic_parameters"]["driver_info"]:
-            result = openstack.image_get(
-                configuration["ironic_parameters"]["driver_info"]["deploy_kernel"]
-            )
-            configuration["ironic_parameters"]["driver_info"][
-                "deploy_kernel"
-            ] = result.id
-
-        if "deploy_ramdisk" in configuration["ironic_parameters"]["driver_info"]:
-            result = openstack.image_get(
-                configuration["ironic_parameters"]["driver_info"]["deploy_ramdisk"]
-            )
-            configuration["ironic_parameters"]["driver_info"][
-                "deploy_ramdisk"
-            ] = result.id
-
-        if "cleaning_network" in configuration["ironic_parameters"]["driver_info"]:
-            result = openstack.network_get(
-                configuration["ironic_parameters"]["driver_info"][
-                    "cleaning_network"
-                ]
-            )
-            configuration["ironic_parameters"]["driver_info"][
-                "cleaning_network"
-            ] = result.id
-
-        if (
-            "provisioning_network"
-            in configuration["ironic_parameters"]["driver_info"]
-        ):
-            result = openstack.network_get(
-                configuration["ironic_parameters"]["driver_info"][
-                    "provisioning_network"
-                ]
-            )
-            configuration["ironic_parameters"]["driver_info"][
-                "provisioning_network"
-            ] = result.id
-
-    return configuration
-
-
-@worker_process_init.connect
-def celery_init_worker(**kwargs):
-    global configuration
-    configuration = get_configuration()
-
-
-@app.on_after_configure.connect
-def setup_periodic_tasks(sender, **kwargs):
-    pass
-
-
-@app.task(bind=True, name="osism.tasks.conductor.get_ironic_parameters")
-def get_ironic_parameters(self):
-    if "ironic_parameters" in configuration:
-        # NOTE: Do not pass by reference, everybody gets their own copy to work with
-        return copy.deepcopy(configuration["ironic_parameters"])
-
-    return {}
-
-
-@app.task(bind=True, name="osism.tasks.conductor.sync_netbox")
-def sync_netbox(self, force_update=False):
-    logger.info("Not implemented")
-
-
-@app.task(bind=True, name="osism.tasks.conductor.sync_ironic")
-def sync_ironic(self, force_update=False):
-    def deep_compare(a, b, updates):
-        """
-        Find items in a that do not exist in b or are different.
-        Write required changes into updates
-        """
-        for key, value in a.items():
-            if type(value) is not dict:
-                if key not in b or b[key] != value:
-                    updates[key] = value
-            else:
-                updates[key] = {}
-                deep_compare(a[key], b[key], updates[key])
-                if not updates[key]:
-                    updates.pop(key)
-
-    def deep_merge(a, b):
-        for key, value in b.items():
-            if value == "DELETE":
-                # NOTE: Use special string to remove keys
-                a.pop(key, None)
-            elif (
-                key not in a.keys()
-                or not isinstance(a[key], dict)
-                or not isinstance(value, dict)
-            ):
-                a[key] = value
-            else:
-                deep_merge(a[key], value)
-
-    def deep_decrypt(a, vault):
-        for key, value in list(a.items()):
-            if not isinstance(value, dict):
-                if vault.is_encrypted(value):
-                    try:
-                        a[key] = vault.decrypt(value).decode()
-                    except Exception:
-                        a.pop(key, None)
-            else:
-                deep_decrypt(a[key], vault)
-
-    driver_params = {
-        "ipmi": {
-            "address": "ipmi_address",
-            "port": "ipmi_port",
-            "password": "ipmi_password",
-        },
-        "redfish": {
-            "address": "redfish_address",
-            "password": "redfish_password",
-        },
-    }
 
+from osism import utils as osism_utils
+from osism.tasks import netbox, openstack
+from osism.tasks.conductor.netbox import get_nb_device_query_list
+from osism.tasks.conductor.utils import (
+    deep_compare,
+    deep_decrypt,
+    deep_merge,
+    get_vault,
+)
+
+
+driver_params = {
+    "ipmi": {
+        "address": "ipmi_address",
+        "port": "ipmi_port",
+        "password": "ipmi_password",
+        "username": "ipmi_username",
+    },
+    "redfish": {
+        "address": "redfish_address",
+        "password": "redfish_password",
+        "username": "redfish_username",
+    },
+}
+
+
+def sync_ironic(get_ironic_parameters, force_update=False):
     devices = set()
     nb_device_query_list = get_nb_device_query_list()
     for nb_device_query in nb_device_query_list:
@@ -248,27 +77,22 @@ def sync_ironic(self, force_update=False):
             ):
                 # NOTE: Update node attributes with overrides from netbox device
                 deep_merge(node_attributes, device.custom_fields["ironic_parameters"])
+
                 # NOTE: Decrypt ansible vaulted secrets
-
-                vault_secret = utils.get_ansible_vault_password()
-                vault = VaultLib(
-                    [
-                        (
-                            ansible_constants.DEFAULT_VAULT_ID_MATCH,
-                            VaultSecret(vault_secret.encode()),
-                        )
-                    ]
-                )
-            except Exception:
-                logger.error("Unable to get vault secret. Dropping encrypted entries")
-                vault = VaultLib()
+                vault = get_vault()
                 deep_decrypt(node_attributes, vault)
+
+                node_secrets = device.custom_fields.get("secrets", {})
+                if node_secrets is None:
+                    node_secrets = {}
+                deep_decrypt(node_secrets, vault)
+
                 if (
                     "driver" in node_attributes
                     and node_attributes["driver"] in driver_params.keys()
                 ):
                     if "driver_info" in node_attributes:
-                        # NOTE:
+                        # NOTE: Remove all fields belonging to a different driver
                         unused_drivers = [
                             driver
                             for driver in driver_params.keys()
@@ -278,6 +102,33 @@ def sync_ironic(self, force_update=False):
                             for driver in unused_drivers:
                                 if key.startswith(driver + "_"):
                                     node_attributes["driver_info"].pop(key, None)
+
+                        # NOTE: Render driver username field
+                        username_key = driver_params[node_attributes["driver"]]["username"]
+                        if username_key in node_attributes["driver_info"]:
+                            node_attributes["driver_info"][username_key] = (
+                                jinja2.Environment(loader=jinja2.BaseLoader())
+                                .from_string(node_attributes["driver_info"][username_key])
+                                .render(
+                                    remote_board_username=str(
+                                        node_secrets.get("remote_board_username", "admin")
+                                    )
+                                )
+                            )
+
+                        # NOTE: Render driver password field
+                        password_key = driver_params[node_attributes["driver"]]["password"]
+                        if password_key in node_attributes["driver_info"]:
+                            node_attributes["driver_info"][password_key] = (
+                                jinja2.Environment(loader=jinja2.BaseLoader())
+                                .from_string(node_attributes["driver_info"][password_key])
+                                .render(
+                                    remote_board_password=str(
+                                        node_secrets.get("remote_board_password", "password")
+                                    )
+                                )
+                            )
+
                         # NOTE: Render driver address field
                         address_key = driver_params[node_attributes["driver"]]["address"]
                         if address_key in node_attributes["driver_info"]:
@@ -335,7 +186,7 @@ def sync_ironic(self, force_update=False):
 
         lock = Redlock(
             key=f"lock_osism_tasks_conductor_sync_ironic-{device.name}",
-            masters={
+            masters={osism_utils.redis},
            auto_release_time=600,
        )
        if lock.acquire(timeout=120):
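
The rendering added above fills remote_board_username / remote_board_password from the decrypted NetBox secrets custom field into Jinja2 templates stored in driver_info. A small standalone sketch with an invented template value:

import jinja2

template = "{{ remote_board_username }}"  # hypothetical driver_info value as it might be stored in NetBox

rendered = (
    jinja2.Environment(loader=jinja2.BaseLoader())
    .from_string(template)
    .render(remote_board_username="admin")
)
print(rendered)  # admin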
osism-0.20250605.0/osism/tasks/conductor/netbox.py
ADDED

@@ -0,0 +1,50 @@
+# SPDX-License-Identifier: Apache-2.0
+
+from loguru import logger
+import yaml
+
+from osism import settings
+from osism.tasks import netbox
+
+
+def get_nb_device_query_list():
+    try:
+        supported_nb_device_filters = [
+            "site",
+            "region",
+            "site_group",
+            "location",
+            "rack",
+            "tag",
+            "state",
+        ]
+        nb_device_query_list = yaml.safe_load(settings.NETBOX_FILTER_CONDUCTOR)
+        if type(nb_device_query_list) is not list:
+            raise TypeError
+        for nb_device_query in nb_device_query_list:
+            if type(nb_device_query) is not dict:
+                raise TypeError
+            for key in list(nb_device_query.keys()):
+                if key not in supported_nb_device_filters:
+                    raise ValueError
+                # NOTE: Only "location_id" and "rack_id" are supported by netbox
+                if key in ["location", "rack"]:
+                    value_name = nb_device_query.pop(key, "")
+                    if key == "location":
+                        value_id = netbox.get_location_id(value_name)
+                    elif key == "rack":
+                        value_id = netbox.get_rack_id(value_name)
+                    if value_id:
+                        nb_device_query.update({key + "_id": value_id})
+                    else:
+                        raise ValueError(f"Invalid name {value_name} for {key}")
+    except (yaml.YAMLError, TypeError):
+        logger.error(
+            f"Setting NETBOX_FILTER_CONDUCTOR needs to be an array of mappings containing supported netbox device filters: {supported_nb_device_filters}"
+        )
+        nb_device_query_list = []
+    except ValueError as exc:
+        logger.error(f"Unknown value in NETBOX_FILTER_CONDUCTOR: {exc}")
+        nb_device_query_list = []
+
+    return nb_device_query_list
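
get_nb_device_query_list() expects the NETBOX_FILTER_CONDUCTOR setting to hold YAML for a list of mappings that only use the supported filter keys. A sketch of a value that would parse cleanly (the site and tag values are invented):

import yaml

NETBOX_FILTER_CONDUCTOR = '[{"site": "dc1", "tag": "managed-by-ironic"}]'  # example value only

queries = yaml.safe_load(NETBOX_FILTER_CONDUCTOR)
assert isinstance(queries, list) and all(isinstance(q, dict) for q in queries)
print(queries)  # [{'site': 'dc1', 'tag': 'managed-by-ironic'}]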
osism-0.20250605.0/osism/tasks/conductor/utils.py
ADDED

@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: Apache-2.0
+
+from ansible import constants as ansible_constants
+from ansible.parsing.vault import VaultLib, VaultSecret
+from loguru import logger
+
+from osism import utils
+
+
+def deep_compare(a, b, updates):
+    """
+    Find items in a that do not exist in b or are different.
+    Write required changes into updates
+    """
+    for key, value in a.items():
+        if type(value) is not dict:
+            if key not in b or b[key] != value:
+                updates[key] = value
+        else:
+            updates[key] = {}
+            deep_compare(a[key], b[key], updates[key])
+            if not updates[key]:
+                updates.pop(key)
+
+
+def deep_merge(a, b):
+    for key, value in b.items():
+        if value == "DELETE":
+            # NOTE: Use special string to remove keys
+            a.pop(key, None)
+        elif (
+            key not in a.keys()
+            or not isinstance(a[key], dict)
+            or not isinstance(value, dict)
+        ):
+            a[key] = value
+        else:
+            deep_merge(a[key], value)
+
+
+def deep_decrypt(a, vault):
+    if a is None:
+        return
+    if isinstance(a, dict):
+        for key, value in list(a.items()):
+            if isinstance(value, (dict, list)):
+                deep_decrypt(a[key], vault)
+            elif vault.is_encrypted(value):
+                try:
+                    a[key] = vault.decrypt(value).decode()
+                except Exception:
+                    a.pop(key, None)
+    elif isinstance(a, list):
+        for i, item in enumerate(a):
+            if isinstance(item, (dict, list)):
+                deep_decrypt(item, vault)
+            elif vault.is_encrypted(item):
+                try:
+                    a[i] = vault.decrypt(item).decode()
+                except Exception:
+                    pass
+
+
+def get_vault():
+    """Create and return a VaultLib instance for decrypting secrets"""
+    try:
+        vault_secret = utils.get_ansible_vault_password()
+        vault = VaultLib(
+            [
+                (
+                    ansible_constants.DEFAULT_VAULT_ID_MATCH,
+                    VaultSecret(vault_secret.encode()),
+                )
+            ]
+        )
+    except Exception:
+        logger.error("Unable to get vault secret. Dropping encrypted entries")
+        vault = VaultLib()
+    return vault
{osism-0.20250602.0 → osism-0.20250605.0/osism.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: osism
-Version: 0.
+Version: 0.20250605.0
 Summary: OSISM manager interface
 Home-page: https://github.com/osism/python-osism
 Author: OSISM GmbH
@@ -41,7 +41,7 @@ Requires-Dist: kubernetes==32.0.1
 Requires-Dist: loguru==0.7.3
 Requires-Dist: nbcli==0.10.0.dev2
 Requires-Dist: netmiko==4.5.0
-Requires-Dist: openstacksdk==4.
+Requires-Dist: openstacksdk==4.6.0
 Requires-Dist: pottery==3.0.1
 Requires-Dist: prompt-toolkit==3.0.51
 Requires-Dist: pynetbox==7.5.0
{osism-0.20250602.0 → osism-0.20250605.0}/osism.egg-info/SOURCES.txt

@@ -81,6 +81,11 @@ osism/tasks/kubernetes.py
 osism/tasks/netbox.py
 osism/tasks/openstack.py
 osism/tasks/reconciler.py
+osism/tasks/conductor/__init__.py
+osism/tasks/conductor/config.py
+osism/tasks/conductor/ironic.py
+osism/tasks/conductor/netbox.py
+osism/tasks/conductor/utils.py
 osism/utils/__init__.py
 playbooks/build.yml
 playbooks/pre.yml
osism-0.20250605.0/osism.egg-info/pbr.json
ADDED

@@ -0,0 +1 @@
+{"git_version": "f816350", "is_release": false}

osism-0.20250602.0/ChangeLog
DELETED

osism-0.20250602.0/osism.egg-info/pbr.json
DELETED

@@ -1 +0,0 @@
-{"git_version": "e6f441e", "is_release": false}