osism 0.20250602.0__py3-none-any.whl → 0.20250616.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,10 +2,14 @@
 
 from cliff.command import Command
 
+import tempfile
+import os
 from loguru import logger
 import openstack
 from tabulate import tabulate
 import json
+import yaml
+from openstack.baremetal import configdrive as configdrive_builder
 
 from osism.commands import get_cloud_connection
 
@@ -70,19 +74,18 @@ class BaremetalDeploy(Command):
     def get_parser(self, prog_name):
         parser = super(BaremetalDeploy, self).get_parser(prog_name)
 
-        parser_exc_group = parser.add_mutually_exclusive_group(required=True)
-        parser_exc_group.add_argument(
+        parser.add_argument(
+            "name",
+            nargs="?",
+            type=str,
+            help="Deploy given baremetal node when in provision state available",
+        )
+        parser.add_argument(
             "--all",
             default=False,
             help="Deploy all baremetal nodes in provision state available",
             action="store_true",
         )
-        parser_exc_group.add_argument(
-            "--name",
-            default=[],
-            help="Deploy given baremetal node when in provision state available. May be specified multiple times",
-            action="append",
-        )
         parser.add_argument(
             "--rebuild",
             default=False,
@@ -99,10 +102,14 @@ class BaremetalDeploy(Command):
 
     def take_action(self, parsed_args):
         all_nodes = parsed_args.all
-        names = parsed_args.name
+        name = parsed_args.name
         rebuild = parsed_args.rebuild
         yes_i_really_really_mean_it = parsed_args.yes_i_really_really_mean_it
 
+        if not all_nodes and not name:
+            logger.error("Please specify a node name or use --all")
+            return
+
         if all_nodes and rebuild and not yes_i_really_really_mean_it:
             logger.error(
                 "Please confirm that you wish to rebuild all nodes by specifying '--yes-i-really-really-mean-it'"
@@ -114,14 +121,14 @@ class BaremetalDeploy(Command):
         if all_nodes:
             deploy_nodes = list(conn.baremetal.nodes(details=True))
         else:
-            deploy_nodes = [
-                conn.baremetal.find_node(name, ignore_missing=True, details=True)
-                for name in names
-            ]
+            node = conn.baremetal.find_node(name, ignore_missing=True, details=True)
+            if not node:
+                logger.warning(f"Could not find node {name}")
+                return
+            deploy_nodes = [node]
 
-        for node_idx, node in enumerate(deploy_nodes):
+        for node in deploy_nodes:
             if not node:
-                logger.warning(f"Could not find node {names[node_idx]}")
                 continue
 
             if node.provision_state in ["available", "deploy failed"]:
@@ -145,23 +152,60 @@ class BaremetalDeploy(Command):
             except openstack.exceptions.ValidationException:
                 logger.warning(f"Node {node.name} ({node.id}) could not be validated")
                 continue
+            # NOTE: Prepare osism config drive
             try:
-                config_drive = {"meta_data": {}}
+                playbook = []
+                play = {
+                    "name": "Run bootstrap - part 2",
+                    "hosts": "localhost",
+                    "connection": "local",
+                    "gather_facts": True,
+                    "vars": {},
+                    "roles": [
+                        "osism.commons.hostname",
+                        "osism.commons.hosts",
+                    ],
+                }
+                play["vars"].update(
+                    {"hostname_name": node.name, "hosts_type": "template"}
+                )
                 if (
                     "netplan_parameters" in node.extra
                     and node.extra["netplan_parameters"]
                 ):
-                    config_drive["meta_data"].update(
+                    play["vars"].update(
                         {
-                            "netplan_parameters": json.loads(
-                                node.extra["netplan_parameters"]
-                            )
+                            "network_allow_service_restart": True,
                         }
                     )
+                    play["vars"].update(json.loads(node.extra["netplan_parameters"]))
+                    play["roles"].append("osism.commons.network")
                 if "frr_parameters" in node.extra and node.extra["frr_parameters"]:
-                    config_drive["meta_data"].update(
-                        {"frr_parameters": json.loads(node.extra["frr_parameters"])}
+                    play["vars"].update(
+                        {
+                            "frr_dummy_interface": "loopback0",
+                        }
                     )
+                    play["vars"].update(json.loads(node.extra["frr_parameters"]))
+                    play["roles"].append("osism.services.frr")
+                playbook.append(play)
+                with tempfile.TemporaryDirectory() as tmp_dir:
+                    with open(os.path.join(tmp_dir, "playbook.yml"), "w") as file:
+                        yaml.dump(
+                            playbook,
+                            file,
+                            default_flow_style=False,
+                            explicit_start=True,
+                            indent=2,
+                            sort_keys=False,
+                        )
+                    config_drive = configdrive_builder.pack(tmp_dir)
+            except Exception as exc:
+                logger.warning(
+                    f"Failed to build config drive for {node.name} ({node.id}): {exc}"
+                )
+                continue
+            try:
                 conn.baremetal.set_node_provision_state(
                     node.id, provision_state, config_drive=config_drive
                 )
@@ -176,19 +220,18 @@ class BaremetalUndeploy(Command):
     def get_parser(self, prog_name):
         parser = super(BaremetalUndeploy, self).get_parser(prog_name)
 
-        parser_exc_group = parser.add_mutually_exclusive_group(required=True)
-        parser_exc_group.add_argument(
+        parser.add_argument(
+            "name",
+            nargs="?",
+            type=str,
+            help="Undeploy given baremetal node",
+        )
+        parser.add_argument(
             "--all",
             default=False,
             help="Undeploy all baremetal nodes",
             action="store_true",
         )
-        parser_exc_group.add_argument(
-            "--name",
-            default=[],
-            help="Undeploy given baremetal node. May be specified multiple times",
-            action="append",
-        )
         parser.add_argument(
             "--yes-i-really-really-mean-it",
             default=False,
@@ -199,9 +242,13 @@ class BaremetalUndeploy(Command):
 
     def take_action(self, parsed_args):
         all_nodes = parsed_args.all
-        names = parsed_args.name
+        name = parsed_args.name
         yes_i_really_really_mean_it = parsed_args.yes_i_really_really_mean_it
 
+        if not all_nodes and not name:
+            logger.error("Please specify a node name or use --all")
+            return
+
         if all_nodes and not yes_i_really_really_mean_it:
             logger.error(
                 "Please confirm that you wish to undeploy all nodes by specifying '--yes-i-really-really-mean-it'"
@@ -213,14 +260,14 @@ class BaremetalUndeploy(Command):
         if all_nodes:
            deploy_nodes = list(conn.baremetal.nodes())
         else:
-            deploy_nodes = [
-                conn.baremetal.find_node(name, ignore_missing=True, details=False)
-                for name in names
-            ]
+            node = conn.baremetal.find_node(name, ignore_missing=True, details=False)
+            if not node:
+                logger.warning(f"Could not find node {name}")
+                return
+            deploy_nodes = [node]
 
-        for node_idx, node in enumerate(deploy_nodes):
+        for node in deploy_nodes:
             if not node:
-                logger.warning(f"Could not find node {names[node_idx]}")
                 continue
 
             if node.provision_state in ["active", "deploy failed", "error"]:
osism/commands/netbox.py CHANGED
@@ -8,6 +8,7 @@ from loguru import logger
 import yaml
 
 from osism.tasks import conductor, netbox, handle_task
+from osism import utils
 
 
 class Ironic(Command):
@@ -18,6 +19,12 @@ class Ironic(Command):
             help="Do not wait until the sync has been completed",
             action="store_true",
         )
+        parser.add_argument(
+            "--task-timeout",
+            default=os.environ.get("OSISM_TASK_TIMEOUT", 300),
+            type=int,
+            help="Timeout for a scheduled task that has not been executed yet",
+        )
         parser.add_argument(
             "--force-update",
             help="Force update of baremetal nodes (Used to update non-comparable items like passwords)",
@@ -27,13 +34,23 @@ class Ironic(Command):
 
     def take_action(self, parsed_args):
         wait = not parsed_args.no_wait
+        task_timeout = parsed_args.task_timeout
 
         task = conductor.sync_ironic.delay(force_update=parsed_args.force_update)
         if wait:
             logger.info(
-                f"Task {task.task_id} (sync ironic) is running. Wait. No more output."
+                f"Task {task.task_id} (sync ironic) is running in background. Output comming soon."
+            )
+            try:
+                return utils.fetch_task_output(task.id, timeout=task_timeout)
+            except TimeoutError:
+                logger.error(
+                    f"Timeout while waiting for further output of task {task.task_id} (sync ironic)"
+                )
+        else:
+            logger.info(
+                f"Task {task.task_id} (sync ironic) is running in background. No more output."
             )
-            task.wait(timeout=None, interval=0.5)
 
 
 class Sync(Command):
@@ -63,13 +80,13 @@ class Manage(Command):
         parser.add_argument(
             "--no-wait",
             default=False,
-            help="Do not wait until the management of the netbox has been completed",
+            help="Do not wait until the management of the NetBox has been completed",
             action="store_true",
         )
         parser.add_argument(
             "--no-netbox-wait",
             default=False,
-            help="Do not wait for the netbox API to be ready",
+            help="Do not wait for the NetBox API to be ready",
             action="store_true",
         )
         parser.add_argument(
@@ -195,7 +212,7 @@ class Console(Command):
         url = os.environ.get("NETBOX_API", None)
 
         if not token or not url:
-            logger.error("Netbox integration not configured.")
+            logger.error("NetBox integration not configured.")
             return
 
         subprocess.call(
@@ -2,13 +2,12 @@
 
 import os
 import subprocess
-import time
 
 from cliff.command import Command
 from loguru import logger
 
 from osism.tasks import reconciler
-from osism.utils import redis
+from osism import utils
 
 
 class Run(Command):
@@ -50,33 +49,12 @@ class Sync(Command):
             logger.info(
                 f"Task {t.task_id} (sync inventory) is running in background. Output coming soon."
             )
-            rc = 0
-            stoptime = time.time() + task_timeout
-            last_id = 0
-            while time.time() < stoptime:
-                data = redis.xread(
-                    {str(t.task_id): last_id}, count=1, block=(300 * 1000)
+            try:
+                return utils.fetch_task_output(t.id, timeout=task_timeout)
+            except TimeoutError:
+                logger.error(
+                    f"Timeout while waiting for further output of task {t.task_id} (sync inventory)"
                 )
-                if data:
-                    stoptime = time.time() + task_timeout
-                    messages = data[0]
-                    for message_id, message in messages[1]:
-                        last_id = message_id.decode()
-                        message_type = message[b"type"].decode()
-                        message_content = message[b"content"].decode()
-
-                        logger.debug(
-                            f"Processing message {last_id} of type {message_type}"
-                        )
-                        redis.xdel(str(t.task_id), last_id)
-
-                        if message_type == "stdout":
-                            print(message_content, end="")
-                        elif message_type == "rc":
-                            rc = int(message_content)
-                        elif message_type == "action" and message_content == "quit":
-                            redis.close()
-                            return rc
         else:
             logger.info(
                 f"Task {t.task_id} (sync inventory) is running in background. No more output."
osism/commands/sync.py CHANGED
@@ -1,8 +1,9 @@
 # SPDX-License-Identifier: Apache-2.0
 
 from cliff.command import Command
+from loguru import logger
 
-from osism.tasks import ansible, handle_task
+from osism.tasks import ansible, conductor, handle_task
 
 
 class Facts(Command):
@@ -17,3 +18,29 @@ class Facts(Command):
         )
         rc = handle_task(t)
         return rc
+
+
+class Sonic(Command):
+    def get_parser(self, prog_name):
+        parser = super(Sonic, self).get_parser(prog_name)
+        parser.add_argument(
+            "--no-wait",
+            default=False,
+            help="Do not wait until the sync has been completed",
+            action="store_true",
+        )
+        return parser
+
+    def take_action(self, parsed_args):
+        wait = not parsed_args.no_wait
+
+        task = conductor.sync_sonic.delay()
+        if wait:
+            logger.info(
+                f"Task {task.task_id} (sync sonic) is running. Wait. No more output."
+            )
+            task.wait(timeout=None, interval=0.5)
+        else:
+            logger.info(
+                f"Task {task.task_id} (sync sonic) is running in background. No more output."
+            )
@@ -1,14 +1,13 @@
 # SPDX-License-Identifier: Apache-2.0
 
 import argparse
-import time
 
 from cliff.command import Command
 from loguru import logger
 
 from osism.core.enums import VALIDATE_PLAYBOOKS
 from osism.tasks import ansible, ceph, kolla
-from osism.utils import redis
+from osism import utils
 
 
 class Run(Command):
@@ -53,35 +52,13 @@ class Run(Command):
         return parser
 
     def _handle_task(self, t, wait, format, timeout, playbook):
-        rc = 0
         if wait:
-            stoptime = time.time() + timeout
-            last_id = 0
-            while time.time() < stoptime:
-                data = redis.xread(
-                    {str(t.task_id): last_id}, count=1, block=(timeout * 1000)
+            try:
+                return utils.fetch_task_output(t.id, timeout=timeout)
+            except TimeoutError:
+                logger.error(
+                    f"Timeout while waiting for further output of task {t.task_id} (sync inventory)"
                 )
-                if data:
-                    stoptime = time.time() + timeout
-                    messages = data[0]
-                    for message_id, message in messages[1]:
-                        last_id = message_id.decode()
-                        message_type = message[b"type"].decode()
-                        message_content = message[b"content"].decode()
-
-                        logger.debug(
-                            f"Processing message {last_id} of type {message_type}"
-                        )
-                        redis.xdel(str(t.task_id), message_id)
-
-                        if message_type == "stdout":
-                            print(message_content, end="")
-                        elif message_type == "rc":
-                            rc = int(message_content)
-                        elif message_type == "action" and message_content == "quit":
-                            redis.close()
-                            return rc
-
         else:
             if format == "log":
                 logger.info(
@@ -90,7 +67,7 @@ class Run(Command):
             elif format == "script":
                 print(f"{t.task_id}")
 
-        return rc
+        return 0
 
     def take_action(self, parsed_args):
         arguments = parsed_args.arguments
osism/commands/vault.py CHANGED
@@ -5,6 +5,7 @@
 
 import os
 import subprocess
+import sys
 
 from cliff.command import Command
 from cryptography.fernet import Fernet
@@ -31,7 +32,14 @@ class SetPassword(Command):
 
         f = Fernet(key)
 
-        ansible_vault_password = prompt("Ansible Vault password: ", is_password=True)
+        # Check if password is being piped from STDIN
+        if not sys.stdin.isatty():
+            ansible_vault_password = sys.stdin.read().strip()
+        else:
+            ansible_vault_password = prompt(
+                "Ansible Vault password: ", is_password=True
+            )
+
         redis.set(
             "ansible_vault_password", f.encrypt(ansible_vault_password.encode("utf-8"))
         )
osism/commands/wait.py CHANGED
@@ -119,38 +119,15 @@ class Run(Command):
 
                     if live:
                         utils.redis.ping()
-
-                        last_id = 0
-                        while_True = True
-                        while while_True:
-                            data = utils.redis.xread(
-                                {str(task_id): last_id}, count=1, block=1000
+                        try:
+                            rc = utils.fetch_task_output(task_id)
+                        except TimeoutError:
+                            logger.error(
+                                f"Timeout while waiting for further output of task {task_id}"
                             )
-                            if data:
-                                messages = data[0]
-                                for message_id, message in messages[1]:
-                                    last_id = message_id.decode()
-                                    message_type = message[b"type"].decode()
-                                    message_content = message[b"content"].decode()
-
-                                    logger.debug(
-                                        f"Processing message {last_id} of type {message_type}"
-                                    )
-                                    utils.redis.xdel(str(task_id), last_id)
-
-                                    if message_type == "stdout":
-                                        print(message_content, end="")
-                                    elif message_type == "rc":
-                                        rc = int(message_content)
-                                    elif (
-                                        message_type == "action"
-                                        and message_content == "quit"
-                                    ):
-                                        utils.redis.close()
-                                        if len(task_ids) == 1:
-                                            return rc
-                                        else:
-                                            while_True = False
+
+                        if len(task_ids) == 1:
+                            return rc
                     else:
                         tmp_task_ids.insert(0, task_id)
 
osism/core/enums.py CHANGED
@@ -102,6 +102,7 @@ VALIDATE_PLAYBOOKS = {
     "ceph-osds": {"environment": "ceph", "runtime": "osism-ansible"},
     "container-status": {"environment": "generic", "runtime": "osism-ansible"},
     "kernel-version": {"environment": "generic", "runtime": "osism-ansible"},
+    "docker-version": {"environment": "generic", "runtime": "osism-ansible"},
     "kolla-connectivity": {"environment": "kolla", "runtime": "osism-ansible"},
     "mysql-open-files-limit": {"environment": "generic", "runtime": "osism-ansible"},
     "ntp": {"environment": "generic", "runtime": "osism-ansible"},
@@ -93,7 +93,7 @@ class BaremetalEvents:
         netbox.set_maintenance.delay(name, state=object_data["maintenance"])
 
     def node_provision_set_success(self, payload: dict[Any, Any]) -> None:
-        # A provision status was successfully set, update it in the netbox
+        # A provision status was successfully set, update it in the NetBox
         object_data = self.get_object_data(payload)
         name = object_data["name"]
         logger.info(
osism/settings.py CHANGED
@@ -35,11 +35,21 @@ INVENTORY_RECONCILER_SCHEDULE = float(
 
 OSISM_API_URL = os.getenv("OSISM_API_URL", None)
 
-NETBOX_FILTER_CONDUCTOR = os.getenv(
-    "NETBOX_FILTER_CONDUCTOR",
+NETBOX_FILTER_CONDUCTOR_IRONIC = os.getenv(
+    "NETBOX_FILTER_CONDUCTOR_IRONIC",
     "[{'state': 'active', 'tag': ['managed-by-ironic']}]",
 )
 
+NETBOX_FILTER_CONDUCTOR_SONIC = os.getenv(
+    "NETBOX_FILTER_CONDUCTOR_SONIC",
+    "[{'state': 'active', 'tag': ['managed-by-metalbox']}]",
+)
+
+# SONiC export configuration
+SONIC_EXPORT_DIR = os.getenv("SONIC_EXPORT_DIR", "/etc/sonic/export")
+SONIC_EXPORT_PREFIX = os.getenv("SONIC_EXPORT_PREFIX", "osism_")
+SONIC_EXPORT_SUFFIX = os.getenv("SONIC_EXPORT_SUFFIX", "_config_db.json")
+
 NETBOX_SECONDARIES = (
     os.getenv("NETBOX_SECONDARIES", read_secret("NETBOX_SECONDARIES")) or "[]"
 )
osism/tasks/__init__.py CHANGED
@@ -4,7 +4,6 @@ import logging
 import os
 import re
 import subprocess
-import time
 
 from loguru import logger
 from pottery import Redlock
@@ -163,14 +162,13 @@ def run_ansible_in_environment(
     while p.poll() is None:
         line = p.stdout.readline().decode("utf-8")
         if publish:
-            utils.redis.xadd(request_id, {"type": "stdout", "content": line})
+            utils.push_task_output(request_id, line)
         result += line
 
     rc = p.wait(timeout=60)
 
     if publish:
-        utils.redis.xadd(request_id, {"type": "rc", "content": rc})
-        utils.redis.xadd(request_id, {"type": "action", "content": "quit"})
+        utils.finish_task_output(request_id, rc=rc)
 
     if locking:
         lock.release()
@@ -212,14 +210,13 @@ def run_command(
     while p.poll() is None:
         line = p.stdout.readline().decode("utf-8")
         if publish:
-            utils.redis.xadd(request_id, {"type": "stdout", "content": line})
+            utils.push_task_output(request_id, line)
         result += line
 
     rc = p.wait(timeout=60)
 
     if publish:
-        utils.redis.xadd(request_id, {"type": "rc", "content": rc})
-        utils.redis.xadd(request_id, {"type": "action", "content": "quit"})
+        utils.finish_task_output(request_id, rc=rc)
 
     if locking:
         lock.release()
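Both run_ansible_in_environment and run_command now publish subprocess output through osism.utils helpers instead of calling utils.redis.xadd directly. The helpers themselves live in osism.utils, which is not included in this diff; the following is a minimal sketch of what push_task_output and finish_task_output could look like, reconstructed from the removed xadd calls. The module-level redis_client and the exact signatures are assumptions inferred from the call sites above.

import redis

# Assumed client; osism.utils presumably reuses its existing Redis connection
# rather than creating a new one here.
redis_client = redis.Redis()


def push_task_output(task_id, line):
    # One stream entry per line of subprocess output, keyed by the task/request id.
    redis_client.xadd(str(task_id), {"type": "stdout", "content": line})


def finish_task_output(task_id, rc=0):
    # Publish the final return code, then tell consumers to stop reading.
    redis_client.xadd(str(task_id), {"type": "rc", "content": rc})
    redis_client.xadd(str(task_id), {"type": "action", "content": "quit"})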
@@ -228,39 +225,10 @@
 
 
 def handle_task(t, wait=True, format="log", timeout=3600):
-    rc = 0
     if wait:
-        stoptime = time.time() + timeout
-        last_id = 0
-        while time.time() < stoptime:
-            data = utils.redis.xread(
-                {str(t.task_id): last_id}, count=1, block=(timeout * 1000)
-            )
-            if data:
-                stoptime = time.time() + timeout
-                messages = data[0]
-                for message_id, message in messages[1]:
-                    last_id = message_id.decode()
-                    message_type = message[b"type"].decode()
-                    message_content = message[b"content"].decode()
-
-                    logger.debug(f"Processing message {last_id} of type {message_type}")
-                    utils.redis.xdel(str(t.task_id), last_id)
-
-                    if message_type == "stdout":
-                        print(message_content, end="", flush=True)
-                        if "PLAY RECAP" in message_content:
-                            logger.info(
-                                "Play has been completed. There may now be a delay until "
-                                "all logs have been written."
-                            )
-                            logger.info("Please wait and do not abort execution.")
-                    elif message_type == "rc":
-                        rc = int(message_content)
-                    elif message_type == "action" and message_content == "quit":
-                        utils.redis.close()
-                        return rc
-        else:
+        try:
+            return utils.fetch_task_output(t.id, timeout=timeout)
+        except TimeoutError:
             logger.info(
                 f"There has been no output from the task {t.task_id} for {timeout} second(s)."
             )
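handle_task, like the reconciler, validate, and wait commands earlier in this diff, now delegates the stream-reading loop to utils.fetch_task_output, which is also defined in osism.utils and not shown here. A minimal sketch of the behaviour the call sites rely on, reconstructed from the removed loops: block on XREAD against the task's stream, print stdout entries as they arrive, return the published return code once the quit action is seen, and raise TimeoutError when no message arrives within the timeout. Only the function name and signature come from the call sites; the rest is an assumption.

import time

import redis

redis_client = redis.Redis()  # assumed; osism.utils keeps its own connection


def fetch_task_output(task_id, timeout=3600):
    # Consume the task's Redis stream until the producer signals "quit".
    rc = 0
    last_id = 0
    stoptime = time.time() + timeout
    while time.time() < stoptime:
        data = redis_client.xread(
            {str(task_id): last_id}, count=1, block=timeout * 1000
        )
        if not data:
            continue
        stoptime = time.time() + timeout  # reset the deadline on every message
        for message_id, message in data[0][1]:
            last_id = message_id.decode()
            message_type = message[b"type"].decode()
            message_content = message[b"content"].decode()
            redis_client.xdel(str(task_id), last_id)
            if message_type == "stdout":
                print(message_content, end="")
            elif message_type == "rc":
                rc = int(message_content)
            elif message_type == "action" and message_content == "quit":
                return rc
    raise TimeoutError(f"No output from task {task_id} for {timeout} second(s)")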
@@ -284,4 +252,4 @@ def handle_task(t, wait=True, format="log", timeout=3600):
         elif format == "script":
             print(f"{t.task_id}")
 
-    return rc
+    return 0