robotframework-pabot 5.1.0__py3-none-any.whl → 5.2.0b1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
@@ -0,0 +1,376 @@
+import os
+import sys
+import time
+import signal
+import threading
+import subprocess
+import datetime
+import queue
+import locale
+
+try:
+    import psutil
+except ImportError:
+    psutil = None
+
+from .writer import get_writer, Color
+
+
+def split_on_first(lst, value):
+    for i, x in enumerate(lst):
+        if x == value:
+            return lst[:i], lst[i+1:]
+    return lst, []
+
+
+class ProcessManager:
+    def __init__(self):
+        self.processes = []
+        self.lock = threading.Lock()
+        self.writer = get_writer()
+
+        # Install SIGINT only in main thread
+        try:
+            if threading.current_thread() is threading.main_thread():
+                signal.signal(signal.SIGINT, self._handle_sigint)
+            else:
+                self.writer.write(
+                    "[ProcessManager] (test mode) signal handlers disabled (not in main thread)"
+                )
+        except Exception as e:
+            self.writer.write(f"[WARN] Could not register signal handler: {e}")
+
+    # -------------------------------
+    # SIGNAL HANDLING
+    # -------------------------------
+
+    def _handle_sigint(self, signum, frame):
+        self.writer.write("[ProcessManager] Ctrl+C detected — terminating all subprocesses", color=Color.RED)
+        self.terminate_all()
+        sys.exit(130)
+
+    # -------------------------------
+    # OUTPUT STREAM READERS
+    # -------------------------------
+
+    def _enqueue_output(self, pipe, q):
+        """
+        Reads lines from `pipe` and puts them into queue `q`.
+        When pipe is exhausted, pushes `None` sentinel.
+        """
+        try:
+            with pipe:
+                for line in iter(pipe.readline, b""):
+                    q.put(line)
+        finally:
+            q.put(None)  # sentinel → "this stream is finished"
+
+    def _safe_write_to_stream(self, stream, text):
+        """
+        Writes text safely to an output stream.
+        If encoding errors occur, fall back to bytes/replace.
+        """
+        try:
+            stream.write(text)
+            try:
+                stream.flush()
+            except Exception:
+                pass
+            return
+        except UnicodeEncodeError:
+            pass
+        except Exception:
+            pass
+
+        enc = getattr(stream, "encoding", None) or locale.getpreferredencoding(False) or "utf-8"
+
+        try:
+            b = text.encode(enc, errors="replace")
+            if hasattr(stream, "buffer"):
+                try:
+                    stream.buffer.write(b)
+                    stream.buffer.write(b"\n")
+                    stream.buffer.flush()
+                    return
+                except Exception:
+                    pass
+
+            safe = b.decode(enc, errors="replace")
+            stream.write(safe + "\n")
+            stream.flush()
+        except Exception:
+            try:
+                print(text)
+            except Exception:
+                pass
+
+    # -------------------------------
+    # STREAM OUTPUT MERGER
+    # -------------------------------
+
+    def _stream_output(self, process, stdout=None, stderr=None,
+                       item_name="process", log_file=None):
+
+        q_out = queue.Queue()
+        q_err = queue.Queue()
+
+        t_out = None
+        t_err = None
+
+        if process.stdout:
+            t_out = threading.Thread(target=self._enqueue_output, args=(process.stdout, q_out))
+            t_out.daemon = True
+            t_out.start()
+
+        if process.stderr:
+            t_err = threading.Thread(target=self._enqueue_output, args=(process.stderr, q_err))
+            t_err.daemon = True
+            t_err.start()
+
+        stdout_done = False
+        stderr_done = False
+
+        log_handle = None
+        if log_file:
+            os.makedirs(os.path.dirname(log_file), exist_ok=True)
+            log_handle = open(log_file, "a", encoding="utf-8")
+
+        try:
+            while True:
+                now = datetime.datetime.now()
+
+                # STDOUT
+                if not stdout_done:
+                    try:
+                        line = q_out.get(timeout=0.05)
+                        if line is None:
+                            stdout_done = True
+                        else:
+                            msg = line.decode(errors="replace").rstrip()
+                            self._safe_write_to_stream(stdout or sys.stdout, msg + "\n")
+                            if log_handle:
+                                log_handle.write(f"{now} {msg}\n")
+                    except queue.Empty:
+                        pass
+
+                # STDERR
+                if not stderr_done:
+                    try:
+                        line = q_err.get_nowait()
+                        if line is None:
+                            stderr_done = True
+                        else:
+                            msg = line.decode(errors="replace").rstrip()
+                            self._safe_write_to_stream(stderr or sys.stderr, msg + "\n")
+                            if log_handle:
+                                log_handle.write(f"{now} {msg}\n")
+                    except queue.Empty:
+                        pass
+
+                # Terminate when both streams finished
+                if stdout_done and stderr_done:
+                    break
+
+        finally:
+            if t_out:
+                t_out.join()
+            if t_err:
+                t_err.join()
+            if log_handle:
+                log_handle.close()
+
+    # -------------------------------
+    # PROCESS CREATION
+    # -------------------------------
+
+    def _start_process(self, cmd, env=None):
+        if sys.platform == "win32":
+            return subprocess.Popen(
+                cmd,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                env=env,
+                shell=False,
+                creationflags=subprocess.CREATE_NEW_PROCESS_GROUP,
+            )
+        else:
+            return subprocess.Popen(
+                cmd,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                env=env,
+                shell=False,
+                preexec_fn=os.setsid,
+            )
+
+    # -------------------------------
+    # PROCESS TREE TERMINATION
+    # -------------------------------
+
+    def _terminate_tree(self, process):
+        if process.poll() is not None:
+            return
+
+        self.writer.write(
+            f"[ProcessManager] Terminating process tree PID={process.pid}",
+            color=Color.YELLOW
+        )
+
+        # PRIMARY: psutil (best reliability)
+        if psutil:
+            try:
+                parent = psutil.Process(process.pid)
+                children = parent.children(recursive=True)
+                for c in children:
+                    try:
+                        c.terminate()
+                    except Exception:
+                        pass
+                psutil.wait_procs(children, timeout=5)
+
+                for c in children:
+                    if c.is_running():
+                        try:
+                            c.kill()
+                        except Exception:
+                            pass
+
+                try:
+                    parent.terminate()
+                except Exception:
+                    pass
+
+                try:
+                    parent.wait(timeout=5)
+                except psutil.TimeoutExpired:
+                    try:
+                        parent.kill()
+                    except Exception:
+                        pass
+
+                return
+            except Exception:
+                pass
+
+        # FALLBACK — Windows
+        if sys.platform == "win32":
+            subprocess.run(
+                ["taskkill", "/PID", str(process.pid), "/T", "/F"],
+                stdout=subprocess.DEVNULL,
+                stderr=subprocess.DEVNULL,
+            )
+            return
+
+        # FALLBACK — Linux / macOS
+        try:
+            os.killpg(os.getpgid(process.pid), signal.SIGTERM)
+            time.sleep(2)
+            if process.poll() is None:
+                os.killpg(os.getpgid(process.pid), signal.SIGKILL)
+        except Exception:
+            if process.poll() is None:
+                try:
+                    process.kill()
+                except Exception:
+                    pass
+
+        try:
+            process.wait(timeout=5)
+        except Exception:
+            pass
+
+    # -------------------------------
+    # PUBLIC API
+    # -------------------------------
+
+    def terminate_all(self):
+        with self.lock:
+            for p in list(self.processes):
+                self._terminate_tree(p)
+            self.processes.clear()
+
+    def run(self, cmd, *, env=None, stdout=None, stderr=None,
+            timeout=None, verbose=False, item_name="process",
+            log_file=None, pool_id=0, item_index=0):
+
+        start = time.time()
+        process = self._start_process(cmd, env)
+
+        with self.lock:
+            self.processes.append(process)
+
+        ts = datetime.datetime.now()
+
+        if verbose:
+            self.writer.write(
+                f"{ts} [PID:{process.pid}] [{pool_id}] [ID:{item_index}] "
+                f"EXECUTING PARALLEL {item_name}:\n{' '.join(cmd)}"
+            )
+        else:
+            self.writer.write(
+                f"{ts} [PID:{process.pid}] [{pool_id}] [ID:{item_index}] EXECUTING {item_name}"
+            )
+
+        # Start logging thread
+        log_thread = threading.Thread(
+            target=self._stream_output,
+            args=(process, stdout, stderr, item_name, log_file),
+        )
+        log_thread.daemon = True
+        log_thread.start()
+
+        rc = None
+        ping_interval = 50  # 5s
+        next_ping = ping_interval
+        counter = 0
+
+        while rc is None:
+            rc = process.poll()
+
+            # TIMEOUT CHECK
+            if timeout and (time.time() - start > timeout):
+                ts = datetime.datetime.now()
+                self.writer.write(
+                    f"{ts} [PID:{process.pid}] [{pool_id}] [ID:{item_index}] "
+                    f"Process {item_name} killed due to exceeding the maximum timeout of {timeout} seconds"
+                )
+                self._terminate_tree(process)
+                rc = -1
+
+                # Dryrun process to mark all tests as failed due to timeout
+                this_dir = os.path.dirname(os.path.abspath(__file__))
+                listener_path = os.path.join(this_dir, "timeout_listener.py")
+                dry_run_env = env.copy() if env else os.environ.copy()
+                before, after = split_on_first(cmd, "-A")
+                dryrun_cmd = before + ["--dryrun", '--listener', listener_path, '-A'] + after
+
+                self.writer.write(
+                    f"{ts} [PID:{process.pid}] [{pool_id}] [ID:{item_index}] "
+                    f"Starting dry run to mark tests as failed due to timeout: {' '.join(dryrun_cmd)}"
+                )
+                subprocess.run(dryrun_cmd, env=dry_run_env, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+                break
+
+            # Progress ping
+            if counter == next_ping:
+                ts = datetime.datetime.now()
+                self.writer.write(
+                    f"{ts} [PID:{process.pid}] [{pool_id}] [ID:{item_index}] still running "
+                    f"{item_name} after {(counter * 0.1):.1f}s"
+                )
+                ping_interval += 50
+                next_ping += ping_interval
+
+            time.sleep(0.1)
+            counter += 1
+
+        log_thread.join()
+
+        elapsed = round(time.time() - start, 1)
+
+        with self.lock:
+            if process in self.processes:
+                self.processes.remove(process)
+
+        return process, (rc, elapsed)
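
The module above is the 5.2.0 beta's new subprocess wrapper: ProcessManager.run() starts the command in its own process group, mirrors its stdout/stderr through reader threads, enforces an optional timeout (running a Robot Framework dry run with the bundled timeout listener to mark the timed-out tests as failed), and returns the Popen object together with the return code and elapsed time. A minimal usage sketch follows; the diff does not show the new file's name, so the import path below is a placeholder:

    import sys
    from pabot.process_manager import ProcessManager  # placeholder import path, not shown in this diff

    manager = ProcessManager()

    # run() blocks until the command finishes (or its tree is killed on timeout)
    # and returns the Popen object plus a (return_code, elapsed_seconds) pair.
    process, (rc, elapsed) = manager.run(
        ["robot", "--outputdir", "out", "tests"],
        timeout=600,          # terminate the whole process tree after 10 minutes
        verbose=True,
        item_name="suite",
        log_file="out/suite.log",
        pool_id=0,
        item_index=1,
    )
    print(f"exit code {rc} after {elapsed}s", file=sys.stderr)
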
pabot/__init__.py CHANGED
@@ -7,4 +7,4 @@ try:
 except ImportError:
     pass
 
-__version__ = "5.1.0"
+__version__ = "5.2.0b1"
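
A note on the termination strategy in the new process manager above: when psutil is installed it is the primary path (children are terminated before the parent), with taskkill /T /F on Windows and os.killpg elsewhere as fallbacks. A self-contained sketch of that psutil pattern, shown as an illustration rather than pabot's own helper:

    import subprocess
    import sys

    import psutil  # optional dependency; the module above degrades gracefully without it

    def terminate_tree(pid, timeout=5):
        """Terminate a process and all of its descendants, children first."""
        parent = psutil.Process(pid)
        children = parent.children(recursive=True)
        for child in children:
            child.terminate()                      # polite request first
        _, alive = psutil.wait_procs(children, timeout=timeout)
        for child in alive:
            child.kill()                           # escalate for stragglers
        parent.terminate()
        try:
            parent.wait(timeout=timeout)
        except psutil.TimeoutExpired:
            parent.kill()

    proc = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])
    terminate_tree(proc.pid)
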
pabot/arguments.py CHANGED
@@ -151,6 +151,11 @@ def _parse_artifacts(arg):
 
 
 def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str, object]]
+    """
+    Parse pabot-specific command line arguments.
+    Supports new --ordering syntax:
+    --ordering <file> [static|dynamic] [skip|run_all]
+    """
     pabot_args = {
         "command": ["pybot" if ROBOT_VERSION < "3.1" else "robot"],
         "verbose": False,
@@ -170,7 +175,8 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
         "chunk": False,
         "no-rebot": False,
     }
-    # Explicitly define argument types for validation
+
+    # Arguments that are flags (boolean)
     flag_args = {
         "verbose",
         "help",
@@ -178,8 +184,10 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
         "pabotlib",
         "artifactsinsubfolders",
         "chunk",
-        "no-rebot"
+        "no-rebot",
     }
+
+    # Arguments that expect values
     value_args = {
         "hive": str,
         "processes": lambda x: int(x) if x != "all" else None,
@@ -188,7 +196,7 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
         "pabotlibport": int,
         "pabotprerunmodifier": str,
         "processtimeout": int,
-        "ordering": str,
+        "ordering": str,  # special handling below
         "suitesfrom": str,
         "artifacts": _parse_artifacts,
         "shard": _parse_shard,
@@ -198,7 +206,7 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
     remaining_args = []
     i = 0
 
-    # Track conflicting options during parsing
+    # Track conflicting pabotlib options
     saw_pabotlib_flag = False
     saw_no_pabotlib = False
 
@@ -209,19 +217,20 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
             i += 1
             continue
 
-        arg_name = arg[2:]  # Strip '--'
+        arg_name = arg[2:]  # remove leading '--'
 
+        # Handle mutually exclusive pabotlib flags
         if arg_name == "no-pabotlib":
             saw_no_pabotlib = True
-            pabot_args["pabotlib"] = False  # Just set the main flag
-            args = args[1:]
+            pabot_args["pabotlib"] = False
+            i += 1
             continue
         if arg_name == "pabotlib":
             saw_pabotlib_flag = True
-            args = args[1:]
+            i += 1
             continue
 
-        # Special case for command
+        # Special handling for --command ... --end-command
        if arg_name == "command":
             try:
                 end_index = args.index("--end-command", i)
@@ -231,7 +240,7 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
             except ValueError:
                 raise DataError("--command requires matching --end-command")
 
-        # Handle flag arguments
+        # Handle boolean flags
         if arg_name in flag_args:
             pabot_args[arg_name] = True
             i += 1
@@ -242,23 +251,57 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
             if i + 1 >= len(args):
                 raise DataError(f"--{arg_name} requires a value")
             try:
-                value = value_args[arg_name](args[i + 1])
-                if arg_name == "shard":
-                    pabot_args["shardindex"], pabot_args["shardcount"] = value
-                elif arg_name == "pabotlibhost":
-                    pabot_args["pabotlib"] = False
-                    pabot_args[arg_name] = value
-                elif arg_name == "artifacts":
-                    pabot_args["artifacts"] = value[0]
-                    pabot_args["artifactstimestamps"] = value[1]
+                # Special parsing for --ordering <file> [mode] [failure_policy]
+                if arg_name == "ordering":
+                    if i + 1 >= len(args):
+                        raise DataError("--ordering requires at least a file path")
+
+                    ordering_file = args[i + 1]
+                    mode = "static"  # default
+                    failure_policy = "run_all"  # default
+
+                    # optional mode
+                    if i + 2 < len(args) and args[i + 2] in ("static", "dynamic"):
+                        mode = args[i + 2]
+                        i_mode_offset = 1
+                    else:
+                        i_mode_offset = 0
+
+                    # optional failure policy, only for dynamic mode
+                    if mode == "dynamic" and i + 2 + i_mode_offset < len(args) and args[i + 2 + i_mode_offset] in ("skip", "run_all"):
+                        failure_policy = args[i + 2 + i_mode_offset]
+                        i_failure_offset = 1
+                    else:
+                        i_failure_offset = 0
+
+                    # store
+                    pabot_args["ordering"] = {
+                        "file": ordering_file,
+                        "mode": mode,
+                        "failure_policy": failure_policy,
+                    }
+
+                    # move index past ordering args only
+                    i += 2 + i_mode_offset + i_failure_offset
+                    continue
                 else:
-                    pabot_args[arg_name] = value
-                i += 2
-                continue
-            except (ValueError, TypeError) as e:
+                    value = value_args[arg_name](args[i + 1])
+                    if arg_name == "shard":
+                        pabot_args["shardindex"], pabot_args["shardcount"] = value
+                    elif arg_name == "pabotlibhost":
+                        pabot_args["pabotlib"] = False
+                        pabot_args[arg_name] = value
+                    elif arg_name == "artifacts":
+                        pabot_args["artifacts"] = value[0]
+                        pabot_args["artifactstimestamps"] = value[1]
+                    else:
+                        pabot_args[arg_name] = value
+                    i += 2
+                    continue
+            except (ValueError, TypeError):
                 raise DataError(f"Invalid value for --{arg_name}: {args[i + 1]}")
-
-        # Handle argument files
+
+        # Handle argumentfiles like --argumentfile1
         match = ARGSMATCHER.match(arg)
         if match:
             if i + 1 >= len(args):
@@ -267,10 +310,11 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
             i += 2
             continue
 
-        # If we get here, it's a non-pabot argument
+        # Any other non-pabot argument
         remaining_args.append(arg)
         i += 1
 
+    # Check for conflicting pabotlib flags
     if saw_pabotlib_flag and saw_no_pabotlib:
         raise DataError("Cannot use both --pabotlib and --no-pabotlib options together")
 
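
The docstring added to _parse_pabot_args describes the new --ordering syntax. As a quick reference, a sketch of the accepted shapes and the dict the parser stores, based on the parsing hunk above; the file name order.txt is a placeholder:

    # Accepted shapes of "--ordering <file> [static|dynamic] [skip|run_all]":
    #
    #   pabot --ordering order.txt ...               -> mode "static",  failure_policy "run_all" (defaults)
    #   pabot --ordering order.txt dynamic ...       -> mode "dynamic", failure_policy "run_all"
    #   pabot --ordering order.txt dynamic skip ...  -> mode "dynamic", failure_policy "skip"
    #
    # In each case pabot_args["ordering"] is stored as a dict of this shape:
    expected_ordering = {
        "file": "order.txt",
        "mode": "dynamic",
        "failure_policy": "skip",
    }

Note that the failure policy token is only consumed when the mode is dynamic; in static mode any trailing skip/run_all token is left for the regular argument handling.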