osism 0.20250331.0__py3-none-any.whl → 0.20250407.0__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
osism/tasks/netbox.py CHANGED
@@ -2,12 +2,12 @@
  
  from celery import Celery
  from celery.signals import worker_process_init
- import json
+ from loguru import logger
+ from pottery import Redlock
  import pynetbox
  
  from osism import settings, utils
- from osism.actions import manage_device, manage_interface
- from osism.tasks import Config, openstack, run_command
+ from osism.tasks import Config, run_command
  
  app = Celery("netbox")
  app.config_from_object(Config)
@@ -32,95 +32,99 @@ def setup_periodic_tasks(sender, **kwargs):
      pass
  
  
- @app.task(bind=True, name="osism.tasks.netbox.periodic_synchronize_ironic")
- def periodic_synchronize_ironic(self):
-     """Synchronize the state of Ironic with Netbox"""
-     openstack.baremetal_node_list.apply_async((), link=synchronize_device_state.s())
-
-
  @app.task(bind=True, name="osism.tasks.netbox.run")
  def run(self, action, arguments):
      pass
  
  
- @app.task(bind=True, name="osism.tasks.netbox.update_network_interface_name")
- def update_network_interface_name(self, mac_address, network_interface_name):
-     manage_interface.update_network_interface_name(mac_address, network_interface_name)
-
-
- @app.task(bind=True, name="osism.tasks.netbox.synchronize_device_state")
- def synchronize_device_state(self, data):
-     """Synchronize the state of Ironic with Netbox"""
-
-     if type(data) == str:
-         data = json.loads(data)
-
-     if not data:
-         return
-
-     for device in data:
-         manage_device.set_provision_state(device["Name"], device["Provisioning State"])
-         manage_device.set_power_state(device["Name"], device["Power State"])
-
-
- @app.task(bind=True, name="osism.tasks.netbox.states")
- def states(self, data):
-     result = manage_device.get_states(data.keys())
-     return result
-
-
- @app.task(bind=True, name="osism.tasks.netbox.set_state")
- def set_state(self, device=None, state=None, state_type=None):
-     manage_device.set_state(device, state, state_type)
+ @app.task(bind=True, name="osism.tasks.netbox.set_maintenance")
+ def set_maintenance(self, device_name, state=True):
+     """Set the maintenance state for a device in the Netbox."""
  
+     lock = Redlock(
+         key=f"lock_osism_tasks_netbox_set_maintenance_{device_name}",
+         masters={utils.redis},
+         auto_release_time=60,
+     )
+     if lock.acquire(timeout=20):
+         try:
+             logger.info(f"Set maintenance state of device {device_name} = {state}")
+
+             device = utils.nb.dcim.devices.get(name=device_name)
+             if device:
+                 device.custom_fields.update({"maintenance": state})
+                 device.save()
+             else:
+                 logger.error(f"Could not set maintenance for {device_name}")
+         finally:
+             lock.release()
+     else:
+         logger.error("Could not acquire lock for node {device_name}")
+
+
+ @app.task(bind=True, name="osism.tasks.netbox.set_provision_state")
+ def set_provision_state(self, device_name, state):
+     """Set the provision state for a device in the Netbox."""
+
+     lock = Redlock(
+         key=f"lock_osism_tasks_netbox_set_provision_state_{device_name}",
+         masters={utils.redis},
+         auto_release_time=60,
+     )
+     if lock.acquire(timeout=20):
+         try:
+             logger.info(f"Set provision state of device {device_name} = {state}")
+
+             device = utils.nb.dcim.devices.get(name=device_name)
+             if device:
+                 device.custom_fields.update({"provision_state": state})
+                 device.save()
+             else:
+                 logger.error(f"Could not set provision state for {device_name}")
+         finally:
+             lock.release()
+     else:
+         logger.error("Could not acquire lock for node {device_name}")
+
+
+ @app.task(bind=True, name="osism.tasks.netbox.set_power_state")
+ def set_power_state(self, device_name, state):
+     """Set the provision state for a device in the Netbox."""
+
+     lock = Redlock(
+         key=f"lock_osism_tasks_netbox_set_provision_state_{device_name}",
+         masters={utils.redis},
+         auto_release_time=60,
+     )
+     if lock.acquire(timeout=20):
+         try:
+             logger.info(f"Set power state of device {device_name} = {state}")
  
- @app.task(bind=True, name="osism.tasks.netbox.set_maintenance")
- def set_maintenance(self, device=None, state=None):
-     manage_device.set_maintenance(device, state)
+             device = utils.nb.dcim.devices.get(name=device_name)
+             if device:
+                 device.custom_fields.update({"power_state": state})
+                 device.save()
+             else:
+                 logger.error(f"Could not set power state for {device_name}")
+         finally:
+             lock.release()
+     else:
+         logger.error("Could not acquire lock for node {device_name}")
  
  
- @app.task(bind=True, name="osism.tasks.netbox.diff")
- @app.task(bind=True, name="osism.tasks.netbox.get_devices_not_yet_registered_in_ironic")
- def get_devices_not_yet_registered_in_ironic(
-     self, status="active", tags=["managed-by-ironic"], ironic_enabled=True
- ):
-     devices = utils.nb.dcim.devices.filter(
-         tag=tags, status=status, cf_ironic_enabled=[ironic_enabled]
-     )
+ @app.task(bind=True, name="osism.tasks.netbox.get_devices")
+ def get_devices_by_tags(self, tags, state="active"):
+     return utils.nb.dcim.devices.filter(tag=tags, state=state)
  
-     result = []
-
-     for device in devices:
-         if (
-             "ironic_state" in device.custom_fields
-             and device.custom_fields["ironic_state"] != "registered"
-         ):
-             result.append(device.name)
-
-     return result
-
-
- @app.task(
-     bind=True,
-     name="osism.tasks.netbox.get_devices_that_should_have_an_allocation_in_ironic",
- )
- def get_devices_that_should_have_an_allocation_in_ironic(self):
-     devices = utils.nb.dcim.devices.filter(
-         tag=["managed-by-ironic", "managed-by-osism"],
-         status="active",
-         cf_ironic_enabled=[True],
-         cf_ironic_state=["registered"],
-         cf_provision_state=["available"],
-         cf_introspection_state=["introspected"],
-         cf_device_type=["server"],
-     )
  
-     result = []
+ @app.task(bind=True, name="osism.tasks.netbox.get_devices")
+ def get_device_by_name(self, name):
+     return utils.nb.dcim.devices.get(name=name)
  
-     for device in devices:
-         result.append(device.name)
  
-     return result
+ @app.task(bind=True, name="osism.tasks.netbox.get_interfaces_by_device")
+ def get_interfaces_by_device(self, device_name):
+     return utils.nb.dcim.interfaces.filter(device=device_name)
  
  
  @app.task(bind=True, name="osism.tasks.netbox.manage")
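The removed synchronize_device_state task used to push Ironic's node list into the Netbox in one step; with the new per-device tasks the same effect is achieved by calling set_provision_state and set_power_state for each node. A minimal sketch of the equivalent call pattern, assuming a node listing in the format produced by osism.tasks.openstack.baremetal_node_list (the node name is illustrative):

from osism.tasks import netbox

# Push the provision and power state of each Ironic node into the Netbox
# custom fields; the Redlock in each task serializes updates per device.
nodes = [
    {"Name": "node-01", "Provisioning State": "active", "Power State": "power on"},
]
for node in nodes:
    netbox.set_provision_state.delay(node["Name"], node["Provisioning State"])
    netbox.set_power_state.delay(node["Name"], node["Power State"])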
@@ -139,7 +143,7 @@ def manage(self, *arguments, publish=True, locking=False, auto_release_time=3600
          *arguments,
          publish=publish,
          locking=locking,
-         auto_release_time=auto_release_time
+         auto_release_time=auto_release_time,
      )
  
  
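The new lookup tasks expose Netbox queries as Celery calls. A hedged usage sketch (tag and device names are illustrative; a configured Celery result backend is assumed for the .get() calls):

from osism.tasks import netbox

# Look up devices by tag and the interfaces of one device, then flag that
# device for maintenance.
devices = netbox.get_devices_by_tags.delay(["managed-by-osism"]).get(timeout=60)
interfaces = netbox.get_interfaces_by_device.delay("node-01").get(timeout=60)
netbox.set_maintenance.delay("node-01", True)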
osism/tasks/openstack.py CHANGED
@@ -1,16 +1,10 @@
  # SPDX-License-Identifier: Apache-2.0
  
- import copy
- import ipaddress
-
  from celery import Celery
- import jinja2
- from openstack.exceptions import ConflictException, ResourceNotFound, ResourceFailure
- from pottery import Redlock
  import tempfile
  
  from osism import utils
- from osism.tasks import Config, conductor, netbox, run_command
+ from osism.tasks import Config, run_command
  
  app = Celery("openstack")
  app.config_from_object(Config)
@@ -25,20 +19,46 @@ def setup_periodic_tasks(sender, **kwargs):
  def image_get(self, image_name):
      conn = utils.get_openstack_connection()
      result = conn.image.find_image(image_name)
-     return result.id
+     return result
  
  
  @app.task(bind=True, name="osism.tasks.openstack.network_get")
  def network_get(self, network_name):
      conn = utils.get_openstack_connection()
      result = conn.network.find_network(network_name)
-     return result.id
+     return result
+
+
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_create")
+ def baremetal_node_create(self, node_name, attributes=None):
+     if attributes is None:
+         attributes = {}
+     attributes.update({"name": node_name})
+     conn = utils.get_openstack_connection()
+     result = conn.baremetal.create_node(**attributes)
+     return result
+
+
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_delete")
+ def baremetal_node_delete(self, node_or_id):
+     conn = utils.get_openstack_connection()
+     result = conn.baremetal.delete_node(node_or_id)
+     return result
+
+
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_update")
+ def baremetal_node_update(self, node_id_or_name, attributes=None):
+     if attributes is None:
+         attributes = {}
+     conn = utils.get_openstack_connection()
+     result = conn.baremetal.update_node(node_id_or_name, **attributes)
+     return result
  
  
  @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_show")
- def baremetal_node_show(self, node_id_or_name):
+ def baremetal_node_show(self, node_id_or_name, ignore_missing=False):
      conn = utils.get_openstack_connection()
-     result = conn.baremetal.find_node(node_id_or_name)
+     result = conn.baremetal.find_node(node_id_or_name, ignore_missing)
      return result
  
  
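The new baremetal_node_* tasks wrap the corresponding openstacksdk calls one-to-one. A minimal enrolment sketch, assuming an Ironic driver configuration (the node name, driver and address values are illustrative):

from osism.tasks import openstack

# Enroll a node and read it back; the attributes dictionary is passed
# straight through to conn.baremetal.create_node().
attributes = {
    "driver": "ipmi",  # illustrative driver and address
    "driver_info": {"ipmi_address": "10.0.0.10"},
}
node = openstack.baremetal_node_create.delay("node-01", attributes).get(timeout=300)
node = openstack.baremetal_node_show.delay("node-01", True).get(timeout=60)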
@@ -64,177 +84,97 @@ def baremetal_node_list(self):
      return result
  
  
- @app.task(
-     bind=True, name="osism.tasks.openstack.baremetal_introspection_interface_list"
- )
- def baremetal_introspection_interface_list(self, node_id_or_name):
-     pass
-
-
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_introspection_status")
- def baremetal_introspection_status(self, node_id_or_name):
-     result = None
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_validate")
+ def baremetal_node_validate(self, node_id_or_name):
+     conn = utils.get_openstack_connection()
+     result = conn.baremetal.validate_node(node_id_or_name, required=())
      return result
  
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_get_network_interface_name")
- def baremetal_get_network_interface_name(self, node_name, mac_address):
+ @app.task(
+     bind=True,
+     name="osism.tasks.openstack.baremetal_node_wait_for_nodes_provision_state",
+ )
+ def baremetal_node_wait_for_nodes_provision_state(self, node_id_or_name, state):
      conn = utils.get_openstack_connection()
+     result = conn.baremetal.wait_for_nodes_provision_state([node_id_or_name], state)
+     if len(result) > 0:
+         return result[0]
+     else:
+         return None
  
-     introspection = conn.baremetal_introspection.get_introspection(node_name)
-
-     # Wait up to 5 minutes for the completion of a running introspection
-     conn.baremetal_introspection.wait_for_introspection(introspection, timeout=30)
-
-     introspection_data = conn.baremetal_introspection.get_introspection_data(
-         introspection
-     )
-     interfaces = introspection_data["inventory"]["interfaces"]
-
-     result = None
-     for interface in interfaces:
-         if interface["mac_address"].lower() == mac_address.lower():
-             result = interface["name"]
  
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_node_set_provision_state")
+ def baremetal_node_set_provision_state(self, node, state):
+     conn = utils.get_openstack_connection()
+     result = conn.baremetal.set_node_provision_state(node, state)
      return result
  
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_set_node_provision_state")
- def baremetal_set_node_provision_state(self, node, state):
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_port_list")
+ def baremetal_port_list(self, details=False, attributes=None):
+     if attributes is None:
+         attributes = {}
      conn = utils.get_openstack_connection()
-     conn.baremetal.set_node_provision_state(node, state)
+     result = conn.baremetal.ports(details=details, **attributes)
+     return list(result)
  
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_create_allocations")
- def baremetal_create_allocations(self, nodes):
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_port_create")
+ def baremetal_port_create(self, attributes=None):
+     if attributes is None:
+         attributes = {}
      conn = utils.get_openstack_connection()
-
-     for node in nodes:
-         try:
-             allocation_a = conn.baremetal.get_allocation(allocation=node)
-         except ResourceNotFound:
-             allocation_a = None
-
-         if not allocation_a:
-             # Get Ironic parameters from the conductor
-             task = conductor.get_ironic_parameters.delay()
-             task.wait(timeout=None, interval=0.5)
-             ironic_parameters = task.get()
-
-             allocation_a = conn.baremetal.create_allocation(
-                 name=node,
-                 candidate_nodes=[node],
-                 resource_class=ironic_parameters["resource_class"],
-             )
-             conn.baremetal.wait_for_allocation(allocation=node, timeout=30)
+     result = conn.baremetal.create_port(**attributes)
+     return result
  
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_create_nodes")
- def baremetal_create_nodes(self, nodes, ironic_parameters):
+ @app.task(bind=True, name="osism.tasks.openstack.baremetal_port_delete")
+ def baremetal_port_delete(self, port_or_id):
      conn = utils.get_openstack_connection()
+     result = conn.baremetal.delete_port(port_or_id)
+     return result
  
-     for node in nodes:
-         # TODO: Filter on mgmt_only
-         address_a = utils.nb.ipam.ip_addresses.get(device=node, interface="Ethernet0")
-
-         node_parameters = copy.deepcopy(ironic_parameters)
-
-         if node_parameters["driver"] == "redfish":
-             remote_board_address = str(ipaddress.ip_interface(address_a["address"]).ip)
-             t = jinja2.Environment(loader=jinja2.BaseLoader()).from_string(
-                 node_parameters["driver_info"]["redfish_address"]
-             )
-             node_parameters["driver_info"]["redfish_address"] = t.render(
-                 remote_board_address=remote_board_address
-             )
-
-         elif node_parameters["driver"] == "ipmi":
-             remote_board_address = str(ipaddress.ip_interface(address_a["address"]).ip)
-             t = jinja2.Environment(loader=jinja2.BaseLoader()).from_string(
-                 node_parameters["driver_info"]["ipmi_address"]
-             )
-             node_parameters["driver_info"]["ipmi_address"] = t.render(
-                 remote_board_address=remote_board_address
-             )
-
-         try:
-             device_a = utils.nb.dcim.devices.get(name=node)
-             tags = [str(tag) for tag in device_a.tags]
-
-             # NOTE: Internally used nodes are identified by their unique name via the resource class.
-             # The actual resource class is explicitly overwritten.
-             if "Managed by Ironic" in tags and "Managed by OSISM" in tags:
-                 node_parameters["resource_class"] = f"osism-{node}"
-                 baremetal_create_internal_flavor(node)
-
-             conn.baremetal.create_node(
-                 name=node, provision_state="manageable", **node_parameters
-             )
-             conn.baremetal.wait_for_nodes_provision_state([node], "manageable")
-
-             if "Managed by Ironic" in tags and "Managed by OSISM" not in tags:
-                 conn.baremetal.set_node_traits(node, ["CUSTOM_GENERAL_USE"])
-             elif "Managed by Ironic" in tags and "Managed by OSISM" in tags:
-                 conn.baremetal.set_node_traits(node, ["CUSTOM_OSISM_USE"])
  
-             conn.baremetal.set_node_provision_state(node, "inspect")
+ @app.task(bind=True, name="osism.tasks.openstack.compute_flavor_get")
+ def compute_flavor_get(self, name_or_id):
+     conn = utils.get_openstack_connection()
+     result = conn.compute.find_flavor(
+         name_or_id, ignore_missing=True, get_extra_specs=True
+     )
+     return result
  
-             # TODO: Check if the system has been registered correctly
-             device_a.custom_fields = {
-                 "ironic_state": "registered",
-             }
-             device_a.save()
-
-         except ResourceFailure:
-             # TODO: Do something useful here
-             pass
-         except ConflictException:
-             # The node already exists and has a wronge state in the Netbox
-             device_a = utils.nb.dcim.devices.get(name=node)
-             device_a.custom_fields = {
-                 "ironic_state": "registered",
-             }
-             device_a.save()
  
+ @app.task(bind=True, name="osism.tasks.openstack.compute_flavor_create")
+ def compute_flavor_create(self, name, attributes=None):
+     if attributes is None:
+         attributes = {}
+     attributes.update({"name": name})
+     extra_specs = attributes.pop("extra_specs", None)
+     conn = utils.get_openstack_connection()
+     flavor = conn.compute.create_flavor(**attributes)
+     if extra_specs:
+         flavor = conn.compute.create_flavor_extra_specs(flavor, extra_specs)
+     return flavor
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_check_allocations")
- def baremetal_check_allocations(self):
-     lock = Redlock(
-         key="lock_osism_tasks_openstack_baremetal_check_allocations",
-         masters={utils.redis},
-         auto_release_time=60,
-     )
  
-     if lock.acquire(timeout=20):
-         netbox.get_devices_that_should_have_an_allocation_in_ironic.apply_async(
-             (), link=baremetal_create_allocations.s()
-         )
-         lock.release()
+ @app.task(bind=True, name="osism.tasks.openstack.compute_flavor_delete")
+ def compute_flavor_delete(self, flavor):
+     conn = utils.get_openstack_connection()
+     conn.compute.delete_flavor(flavor, ignore_missing=True)
  
  
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_create_internal_flavor")
- def baremetal_create_internal_flavor(self, node):
+ @app.task(bind=True, name="osism.tasks.openstack.compute_flavor_update_extra_specs")
+ def compute_flavor_update_extra_specs(self, flavor, extra_specs={}):
      conn = utils.get_openstack_connection()
+     for key, value in extra_specs.items():
+         conn.compute.update_flavor_extra_specs_property(flavor, key, value)
  
-     flavor_a = conn.compute.create_flavor(
-         name=f"osism-{node}", ram=1, vcpus=1, disk=1, is_public=False
-     )
-     specs = {
-         f"resources:CUSTOM_RESOURCE_CLASS_OSISM_{node.upper()}": 1,
-         "resources:VCPU": 0,
-         "resources:MEMORY_MB": 0,
-         "resources:DISK_GB": 0,
-         "trait:CUSTOM_OSISM_USE": "required",
-     }
-     conn.compute.create_flavor_extra_specs(flavor_a, specs)
-
-
- @app.task(bind=True, name="osism.tasks.openstack.baremetal_delete_internal_flavor")
- def baremetal_delete_internal_flavor(self, node):
-     conn = utils.get_openstack_connection()
  
-     flavor = conn.compute.get_flavor(f"osism-{node}")
-     conn.compute.delete_flavor(flavor)
+ @app.task(bind=True, name="osism.tasks.openstack.compute_flavor_delete_extra_specs")
+ def compute_flavor_delete_extra_specs_property(self, flavor, prop):
+     conn = utils.get_openstack_connection()
+     conn.compute.delete_flavor_extra_specs_property(flavor, prop)
  
  
  @app.task(bind=True, name="osism.tasks.openstack.image_manager")
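The flavor and provision-state helpers are thin wrappers as well. A sketch of creating a flavor with extra specs and driving a node to "available" (names, sizes and timeouts are illustrative; the extra-spec key mirrors the one used by the removed internal-flavor task):

from osism.tasks import openstack

# Create a flavor; "extra_specs" is popped from the attributes by the task
# and applied via create_flavor_extra_specs().
openstack.compute_flavor_create.delay(
    "osism-node-01",
    {
        "ram": 1,
        "vcpus": 1,
        "disk": 1,
        "extra_specs": {"trait:CUSTOM_OSISM_USE": "required"},
    },
)

# Request the "provide" verb and block until Ironic reports the node as available.
openstack.baremetal_node_set_provision_state.delay("node-01", "provide")
node = openstack.baremetal_node_wait_for_nodes_provision_state.delay(
    "node-01", "available"
).get(timeout=600)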
osism-0.20250331.0.dist-info/METADATA → osism-0.20250407.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: osism
- Version: 0.20250331.0
+ Version: 0.20250407.0
  Summary: OSISM manager interface
  Home-page: https://github.com/osism/python-osism
  Author: OSISM GmbH
@@ -27,7 +27,7 @@ Requires-Dist: GitPython==3.1.44
  Requires-Dist: Jinja2==3.1.6
  Requires-Dist: PyYAML==6.0.2
  Requires-Dist: ara==1.7.2
- Requires-Dist: celery[redis]==5.4.0
+ Requires-Dist: celery[redis]==5.5.0
  Requires-Dist: cliff==4.9.1
  Requires-Dist: deepdiff==8.4.2
  Requires-Dist: docker==7.1.0
@@ -61,7 +61,7 @@ Provides-Extra: ansible
  Requires-Dist: ansible-runner==2.4.1; extra == "ansible"
  Requires-Dist: ansible-core==2.18.4; extra == "ansible"
  Provides-Extra: openstack-image-manager
- Requires-Dist: openstack-image-manager==0.20250314.0; extra == "openstack-image-manager"
+ Requires-Dist: openstack-image-manager==0.20250407.0; extra == "openstack-image-manager"
  Dynamic: author
  Dynamic: author-email
  Dynamic: classifier
osism-0.20250331.0.dist-info/RECORD → osism-0.20250407.0.dist-info/RECORD CHANGED
@@ -4,8 +4,6 @@ osism/api.py,sha256=Lvkdd92tvv9RtoMs9RtvqsN3DiSKPdSll24J3wRzbBY,4793
  osism/main.py,sha256=Dt2-9sLXcS-Ny4DAz7hrha-KRc7zd7BFUTRdfs_X8z4,893
  osism/settings.py,sha256=m__DltxKQo5D-vDKKwY8RNBVs5bverYdJmtyVyln_6o,1049
  osism/actions/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
- osism/actions/manage_device.py,sha256=joQwPnwEUw5V1ZRRbdrM0FjfNlG4vPNc0r8FBRTOJiA,3541
- osism/actions/manage_interface.py,sha256=iDp7zY16XXtwdLk1sxa-TBAkpdPxmtbVeEvMZuP5h4s,472
  osism/commands/__init__.py,sha256=Ag4wX_DCgXRdoLn6t069jqb3DdRylsX2nyYkiyCx4uk,456
  osism/commands/apply.py,sha256=n3lLb1cS3GahQqRT0723di98hg47MjVzDzkAoeZX7qU,16780
  osism/commands/compose.py,sha256=iqzG7mS9E1VWaLNN6yQowjOqiHn3BMdj-yfXb3Dc4Ok,1200
@@ -15,8 +13,8 @@ osism/commands/console.py,sha256=8BPz1hio5Wi6kONVAWFuSqkDRrMcLEYeFIY8dbtN6e4,321
  osism/commands/container.py,sha256=Fku2GaCM3Idq_FxExUtNqjrEM0XYjpVvXmueSVO8S_c,1601
  osism/commands/get.py,sha256=ryytjtXWmlMV0NucP5tGkMZu0nIlC4xVtjRk4iMZ06c,8967
  osism/commands/log.py,sha256=2IpYuosC7FZwwLvM8HmKSU1NRNIelVVYzqjjVMCrOJk,4072
- osism/commands/manage.py,sha256=SDJyH3zwdaOjVWURIIjm8WMo6zSor1Y_TiTYgeMt4pI,11932
- osism/commands/netbox.py,sha256=FYBHcOR_cO-n7rcf4V_-DbwUCgMLFmrrPKCjd0zQOp4,4548
+ osism/commands/manage.py,sha256=E0ZF4Bf91cgttkSVt1dOQ4nQbRUSDAgsgPOjJMDsGBk,11932
+ osism/commands/netbox.py,sha256=_2-j6XM9JvH0DXnbct6rG9T6hT8KEpm3vazQC28Rt7I,4529
  osism/commands/noset.py,sha256=7zDFuFMyNpo7DUOKcNiYV8nodtdMOYFp5LDPcuJhlZ8,1481
  osism/commands/reconciler.py,sha256=Ja_b86gX6-_Pr3DmrUUvskmEnnJpHQ-XJNQLycMJeyc,2818
  osism/commands/server.py,sha256=zFXRdYoj4ZNDJNPSaGddMPEWxt8G2GyMomPOcCOaN3c,4137
@@ -31,27 +29,27 @@ osism/commands/volume.py,sha256=SqD9pYgtcYnMu6sB2pG8lfrLHRq6GzOb_-RkWOOVZPo,3156
  osism/commands/wait.py,sha256=mKFDqEXcaLlKw1T3MuBEZpNh7CeL3lpUXgubD2_f8es,6580
  osism/commands/worker.py,sha256=iraCOEhCp7WgfjfZ0-12XQYQPUjpi9rSJK5Z9JfNJk4,1651
  osism/core/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
- osism/core/enums.py,sha256=UDV3WoOp9kfGTPCQ94tr-2v6c07pNP2kYrxxv6pwxDI,9638
+ osism/core/enums.py,sha256=ldH2wM0mea7oHBSCrxEyCqkjH_R2kc8wmdI2J9eb6sM,9952
  osism/core/playbooks.py,sha256=M3T3ajV-8Lt-orsRO3jAoukhaoYFr4EZ2dzYXQjt1kg,728
  osism/data/__init__.py,sha256=izXdh0J3vPLQI7kBhJI7ibJQzPqU_nlONP0L4Cf_k6A,1504
  osism/plugins/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
  osism/services/__init__.py,sha256=bG7Ffen4LvQtgnYPFEpFccsWs81t4zqqeqn9ZeirH6E,38
- osism/services/listener.py,sha256=JjCdwPG5U9b_xYDpGFQeiLPP4y00GM3Me6NW1tt6Jws,11275
+ osism/services/listener.py,sha256=eEamlQsJqCuU9K2QFmk3yM9LAJZEanVcTLtGMsNCKjs,9783
  osism/tasks/__init__.py,sha256=lrSkcZtbzhWsLS4hWadKfpP_tCd1pX1IhvrBU3EhKmM,8605
  osism/tasks/ansible.py,sha256=RcLxLrjzL5_X6OjNHm3H0lZlmKKlYKIANB0M4_d4chE,1109
  osism/tasks/ceph.py,sha256=eIQkah3Kj4INtOkF9kTjHbXJ3_J2lg48EWJKfHc-UYw,615
- osism/tasks/conductor.py,sha256=P52Avy8OgNQ4koZp3QZLXJiN9uIiBcqrmDpc3UXsPzs,3639
+ osism/tasks/conductor.py,sha256=Qg4ic9j5khHGumXCRaosrDiphs4-Eqk02BCb78zuTkM,19162
  osism/tasks/kolla.py,sha256=wJQpWn_01iWLkr7l7T7RNrQGfRgsgmYi4WQlTmNGvew,618
  osism/tasks/kubernetes.py,sha256=VzXq_VrYU_CLm4cOruqnE3Kq2ydfO9glZ3p0bp3OYoc,625
- osism/tasks/netbox.py,sha256=JTgMLp5WAGoupU5Os6xWnKHXACxfXVS33wM1rvbz6Y0,4432
- osism/tasks/openstack.py,sha256=nhHiEcmI_AjM-oYnqjlJ0-c9qYZRQeruOTJsLbScxKI,10258
+ osism/tasks/netbox.py,sha256=qT3-0GWDPCnejLGAhNp7InMSxBTk7qmKwfdNn1in3FM,4857
+ osism/tasks/openstack.py,sha256=ZFdgudp02a9I4AiJae2Pu0_k9REYi4P7wTLA5rzx8is,7825
  osism/tasks/reconciler.py,sha256=RGUcax2gDuyVLw1nGRQn5izXclnPBo9MRl0ndLDiiYQ,2707
  osism/utils/__init__.py,sha256=DP2D7xyXnfWuH-c26elIwdwrMSY-oSkVsLFKsQfna9w,1477
- osism-0.20250331.0.dist-info/licenses/AUTHORS,sha256=EKFIR9F27AvoEXp1cA6FkGbjEOFt4Rcbipr5RJc7jSs,64
- osism-0.20250331.0.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
- osism-0.20250331.0.dist-info/METADATA,sha256=0iUarhckKfE774zkRlFp8Pn21wK2bW30nztf0udL2t8,2972
- osism-0.20250331.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- osism-0.20250331.0.dist-info/entry_points.txt,sha256=DlfrvU14rI55WuTrwNRoce9FY3ric4HeZKZx_Z3NzCw,3015
- osism-0.20250331.0.dist-info/pbr.json,sha256=dAx-E5EYMfbySf6Y9YoFJwWMs-TE1eIlvi2JwTL12Nw,47
- osism-0.20250331.0.dist-info/top_level.txt,sha256=8L8dsI9hcaGHsdnR4k_LN9EM78EhwrXRFHyAryPXZtY,6
- osism-0.20250331.0.dist-info/RECORD,,
+ osism-0.20250407.0.dist-info/licenses/AUTHORS,sha256=EKFIR9F27AvoEXp1cA6FkGbjEOFt4Rcbipr5RJc7jSs,64
+ osism-0.20250407.0.dist-info/licenses/LICENSE,sha256=tAkwu8-AdEyGxGoSvJ2gVmQdcicWw3j1ZZueVV74M-E,11357
+ osism-0.20250407.0.dist-info/METADATA,sha256=TIDKl4LiJsyH7yJLztBxKbVc3BWnaKMdHxXRZ377Reg,2972
+ osism-0.20250407.0.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ osism-0.20250407.0.dist-info/entry_points.txt,sha256=DlfrvU14rI55WuTrwNRoce9FY3ric4HeZKZx_Z3NzCw,3015
+ osism-0.20250407.0.dist-info/pbr.json,sha256=7g2xfwKFA8HvM2aPcqC5h0SXZ_VjJYyEL03mJcdLU6A,47
+ osism-0.20250407.0.dist-info/top_level.txt,sha256=8L8dsI9hcaGHsdnR4k_LN9EM78EhwrXRFHyAryPXZtY,6
+ osism-0.20250407.0.dist-info/RECORD,,
osism-0.20250331.0.dist-info/pbr.json → osism-0.20250407.0.dist-info/pbr.json CHANGED
@@ -0,0 +1 @@
+ {"git_version": "976a50a", "is_release": false}