robotframework-pabot 5.1.0__py3-none-any.whl → 5.2.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pabot/pabot.py CHANGED
@@ -48,6 +48,7 @@ from glob import glob
  from io import BytesIO, StringIO
  from multiprocessing.pool import ThreadPool
  from natsort import natsorted
+ from pathlib import Path

  from robot import __version__ as ROBOT_VERSION
  from robot import rebot
@@ -83,6 +84,7 @@ from .execution_items import (
      create_dependency_tree,
  )
  from .result_merger import merge
+ from .writer import get_writer

  try:
      import queue # type: ignore
@@ -100,10 +102,10 @@ try:
  except ImportError:
      METADATA_AVAILABLE = False

- from typing import IO, Any, Dict, List, Optional, Tuple, Union
+ from typing import Any, Dict, List, Optional, Tuple, Union

  CTRL_C_PRESSED = False
- MESSAGE_QUEUE = queue.Queue()
+ #MESSAGE_QUEUE = queue.Queue()
  EXECUTION_POOL_IDS = [] # type: List[int]
  EXECUTION_POOL_ID_LOCK = threading.Lock()
  POPEN_LOCK = threading.Lock()
@@ -130,6 +132,15 @@ _ALL_ELAPSED = [] # type: List[Union[int, float]]
  # Python version check for supporting importlib.metadata (requires Python 3.8+)
  IS_PYTHON_3_8_OR_NEWER = sys.version_info >= (3, 8)

+ _PROCESS_MANAGER = None
+
+ def _ensure_process_manager():
+     global _PROCESS_MANAGER
+     if _PROCESS_MANAGER is None:
+         from pabot.ProcessManager import ProcessManager
+         _PROCESS_MANAGER = ProcessManager()
+     return _PROCESS_MANAGER
+

  def read_args_from_readme():
      """Reads a specific section from package METADATA or development README.md if available."""
@@ -215,7 +226,7 @@ class Color:


  def execute_and_wait_with(item):
-     # type: ('QueueItem') -> None
+     # type: ('QueueItem') -> int
      global CTRL_C_PRESSED, _NUMBER_OF_ITEMS_TO_BE_EXECUTED
      is_last = _NUMBER_OF_ITEMS_TO_BE_EXECUTED == 1
      _NUMBER_OF_ITEMS_TO_BE_EXECUTED -= 1
@@ -234,6 +245,7 @@ def execute_and_wait_with(item):
          run_cmd, run_options = _create_command_for_execution(
              caller_id, datasources, is_last, item, outs_dir
          )
+         rc = 0
          if item.hive:
              _hived_execute(
                  item.hive,
@@ -246,7 +258,7 @@
                  item.index,
              )
          else:
-             _try_execute_and_wait(
+             rc = _try_execute_and_wait(
                  run_cmd,
                  run_options,
                  outs_dir,
@@ -264,6 +276,7 @@ def execute_and_wait_with(item):
          )
      except:
          _write(traceback.format_exc())
+     return rc


  def _create_command_for_execution(caller_id, datasources, is_last, item, outs_dir):
@@ -281,6 +294,7 @@ def _create_command_for_execution(caller_id, datasources, is_last, item, outs_di
              item.index,
              item.last_level,
              item.processes,
+             item.skip,
          )
          + datasources
      )
@@ -317,7 +331,7 @@ def _try_execute_and_wait(
      process_timeout=None,
      sleep_before_start=0
  ):
-     # type: (List[str], List[str], str, str, bool, int, str, int, bool, Optional[int], int) -> None
+     # type: (List[str], List[str], str, str, bool, int, str, int, bool, Optional[int], int) -> int
      plib = None
      is_ignored = False
      if _pabotlib_in_use():
@@ -360,6 +374,7 @@
      )
      if is_ignored and os.path.isdir(outs_dir):
          _rmtree_with_path(outs_dir)
+     return rc


  def _result_to_stdout(
@@ -556,88 +571,39 @@ def _run(
      process_timeout,
      sleep_before_start,
  ):
-     # type: (List[str], List[str], IO[Any], IO[Any], str, bool, int, int, str, Optional[int], int) -> Tuple[Union[subprocess.Popen[bytes], subprocess.Popen], Tuple[int, float]]
      timestamp = datetime.datetime.now()
+
      if sleep_before_start > 0:
-         _write(
-             "%s [%s] [ID:%s] SLEEPING %s SECONDS BEFORE STARTING %s"
-             % (timestamp, pool_id, item_index, sleep_before_start, item_name),
-         )
+         _write(f"{timestamp} [{pool_id}] [ID:{item_index}] SLEEPING {sleep_before_start} SECONDS BEFORE STARTING {item_name}")
          time.sleep(sleep_before_start)
-         timestamp = datetime.datetime.now()
+
      command_name = run_command[-1].replace(" ", "_")
      argfile_path = os.path.join(outs_dir, f"{command_name}_argfile.txt")
      _write_internal_argument_file(run_options, filename=argfile_path)
-     cmd = ' '.join(run_command + ['-A'] + [argfile_path])
-     if PY2:
-         cmd = cmd.decode("utf-8").encode(SYSTEM_ENCODING)
-     # avoid hitting https://bugs.python.org/issue10394
-     with POPEN_LOCK:
-         my_env = os.environ.copy()
-         syslog_file = my_env.get("ROBOT_SYSLOG_FILE", None)
-         if syslog_file:
-             my_env["ROBOT_SYSLOG_FILE"] = os.path.join(
-                 outs_dir, os.path.basename(syslog_file)
-             )
-         process = subprocess.Popen(
-             cmd, shell=True, stderr=stderr, stdout=stdout, env=my_env
-         )
-     if verbose:
-         _write_with_id(
-             process,
-             pool_id,
-             item_index,
-             "EXECUTING PARALLEL %s with command:\n%s" % (item_name, cmd),
-             timestamp=timestamp,
-         )
-     else:
-         _write_with_id(
-             process,
-             pool_id,
-             item_index,
-             "EXECUTING %s" % item_name,
-             timestamp=timestamp,
-         )
-     return process, _wait_for_return_code(
-         process, item_name, pool_id, item_index, process_timeout
-     )
-

- def _wait_for_return_code(process, item_name, pool_id, item_index, process_timeout):
-     rc = None
-     elapsed = 0
-     ping_time = ping_interval = 150
-     while rc is None:
-         rc = process.poll()
-         time.sleep(0.1)
-         elapsed += 1
-
-         if process_timeout and elapsed / 10.0 >= process_timeout:
-             process.terminate()
-             process.wait()
-             rc = (
-                 -1
-             ) # Set a return code indicating that the process was killed due to timeout
-             _write_with_id(
-                 process,
-                 pool_id,
-                 item_index,
-                 "Process %s killed due to exceeding the maximum timeout of %s seconds"
-                 % (item_name, process_timeout),
-             )
-             break
-
-         if elapsed == ping_time:
-             ping_interval += 50
-             ping_time += ping_interval
-             _write_with_id(
-                 process,
-                 pool_id,
-                 item_index,
-                 "still running %s after %s seconds" % (item_name, elapsed / 10.0),
-             )
+     cmd = run_command + ['-A', argfile_path]
+     my_env = os.environ.copy()
+     syslog_file = my_env.get("ROBOT_SYSLOG_FILE", None)
+     if syslog_file:
+         my_env["ROBOT_SYSLOG_FILE"] = os.path.join(outs_dir, os.path.basename(syslog_file))
+
+     log_path = os.path.join(outs_dir, f"{command_name}_{item_index}.log")
+
+     manager = _ensure_process_manager()
+     process, (rc, elapsed) = manager.run(
+         cmd,
+         env=my_env,
+         stdout=stdout,
+         stderr=stderr,
+         timeout=process_timeout,
+         verbose=verbose,
+         item_name=item_name,
+         log_file=log_path,
+         pool_id=pool_id,
+         item_index=item_index,
+     )

-     return rc, elapsed / 10.0
+     return process, (rc, elapsed)


  def _read_file(file_handle):
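pabot/ProcessManager.py is new in 5.2.0b1 but is not included in this file's diff, so only its call surface is visible above. Purely as a reading aid, here is a minimal sketch of an object that would satisfy that surface, reusing the polling and timeout behaviour of the removed _wait_for_return_code; the class name and all internals are assumptions, only the run() parameters and the (process, (rc, elapsed)) return shape come from the call site.

# Hypothetical sketch only; the real pabot.ProcessManager may differ.
import subprocess
import time


class ProcessManagerSketch:
    def run(self, cmd, env=None, stdout=None, stderr=None, timeout=None,
            verbose=False, item_name="", log_file=None, pool_id=0, item_index=0):
        # cmd is a list, so no shell is needed.
        process = subprocess.Popen(cmd, env=env, stdout=stdout, stderr=stderr)
        started = time.time()
        rc = None
        while rc is None:
            rc = process.poll()
            time.sleep(0.1)
            if timeout and time.time() - started >= timeout:
                process.terminate()
                process.wait()
                rc = -1  # mirrors the old code's -1 on timeout
                break
        elapsed = time.time() - started
        return process, (rc, elapsed)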
@@ -697,6 +663,7 @@ def _options_for_executor(
      queueIndex,
      last_level,
      processes,
+     skip,
  ):
      options = options.copy()
      options["log"] = "NONE"
@@ -733,6 +700,11 @@
          options["argumentfile"] = argfile
      if options.get("test", False) and options.get("include", []):
          del options["include"]
+     if skip:
+         this_dir = os.path.dirname(os.path.abspath(__file__))
+         listener_path = os.path.join(this_dir, "skip_listener.py")
+         options["dryrun"] = True
+         options["listener"].append(listener_path)
      return _set_terminal_coloring_options(options)

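pabot/skip_listener.py itself is not part of this diff, so its contents are unknown here. Combining dryrun with a listener is a common way to report tests as skipped without executing them; purely as a hedged illustration of that pattern (not the shipped file), a Robot Framework listener v3 module could look like this:

# Hypothetical illustration only; the real pabot/skip_listener.py in 5.2.0b1
# is not shown in this diff and may differ.
ROBOT_LISTENER_API_VERSION = 3


def end_test(data, result):
    # Force the (dry-run) test result to SKIP so a skipped dependency chain
    # is reported as skipped rather than passed.
    result.status = "SKIP"
    result.message = "Skipped: a dependency failed earlier in the pabot run."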
@@ -1446,6 +1418,117 @@ def keyboard_interrupt(*args):
      CTRL_C_PRESSED = True


+ def _get_depends(item):
+     return getattr(item.execution_item, "depends", [])
+
+
+ def _dependencies_satisfied(item, completed):
+     return all(dep in completed for dep in _get_depends(item))
+
+
+ def _collect_transitive_dependents(failed_name, pending_items):
+     """
+     Returns all pending items that (directly or indirectly) depend on failed_name.
+     """
+     to_skip = set()
+     queue = [failed_name]
+
+     # Build dependency map once
+     depends_map = {
+         item.execution_item.name: set(_get_depends(item))
+         for item in pending_items
+     }
+
+     while queue:
+         current = queue.pop(0)
+         for item_name, deps in depends_map.items():
+             if current in deps and item_name not in to_skip:
+                 to_skip.add(item_name)
+                 queue.append(item_name)
+
+     return to_skip
+
+
+ def _parallel_execute_dynamic(
+     items,
+     processes,
+     datasources,
+     outs_dir,
+     opts_for_run,
+     pabot_args,
+ ):
+     original_signal_handler = signal.signal(signal.SIGINT, keyboard_interrupt)
+
+     max_processes = processes or len(items)
+     pool = ThreadPool(max_processes)
+
+     pending = set(items)
+     running = {}
+     completed = set()
+     failed = set()
+
+     failure_policy = pabot_args.get("ordering", {}).get("failure_policy", "run_all")
+     lock = threading.Lock()
+
+     def on_complete(it, rc):
+         nonlocal pending, running, completed, failed
+
+         with lock:
+             running.pop(it, None)
+             completed.add(it.execution_item.name)
+
+             if rc != 0:
+                 failed.add(it.execution_item.name)
+
+                 if failure_policy == "skip":
+                     to_skip_names = _collect_transitive_dependents(
+                         it.execution_item.name,
+                         pending,
+                     )
+
+                     for other in list(pending):
+                         if other.execution_item.name in to_skip_names:
+                             _write(
+                                 f"Skipping '{other.execution_item.name}' because dependency "
+                                 f"'{it.execution_item.name}' failed (transitive).",
+                                 Color.YELLOW,
+                             )
+                             other.skip = True
+
+     try:
+         while pending or running:
+             with lock:
+                 ready = [
+                     item for item in list(pending)
+                     if _dependencies_satisfied(item, completed)
+                 ]
+
+                 while ready and len(running) < max_processes:
+                     item = ready.pop(0)
+                     pending.remove(item)
+
+                     result = pool.apply_async(
+                         execute_and_wait_with,
+                         (item,),
+                         callback=lambda rc, it=item: on_complete(it, rc),
+                     )
+                     running[item] = result
+
+             dynamic_items = _get_dynamically_created_execution_items(
+                 datasources, outs_dir, opts_for_run, pabot_args
+             )
+             if dynamic_items:
+                 with lock:
+                     for di in dynamic_items:
+                         pending.add(di)
+
+             time.sleep(0.1)
+
+     finally:
+         pool.close()
+         signal.signal(signal.SIGINT, original_signal_handler)
+
+
  def _parallel_execute(
      items, processes, datasources, outs_dir, opts_for_run, pabot_args
  ):
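The breadth-first walk above skips every pending item that depends on the failed one directly or through a chain, not just its immediate dependents. A small, self-contained illustration; the SimpleNamespace stand-ins only mimic the .execution_item.name and .depends attributes of the real QueueItem objects, and the import assumes pabot 5.2.0b1 is installed:

# Standalone illustration of the transitive-skip walk added above.
from types import SimpleNamespace

from pabot.pabot import _collect_transitive_dependents


def fake_item(name, depends=()):
    # Minimal object exposing the attributes the helper reads.
    return SimpleNamespace(
        execution_item=SimpleNamespace(name=name, depends=list(depends))
    )


pending = [fake_item("B", depends=["A"]), fake_item("C", depends=["B"]), fake_item("D")]
# A failed: B depends on A directly, C depends on B, D is unaffected.
print(_collect_transitive_dependents("A", pending))  # {'B', 'C'}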
@@ -1540,16 +1623,34 @@ def _copy_output_artifacts(options, timestamp_id=None, file_extensions=None, inc


  def _check_pabot_results_for_missing_xml(base_dir, command_name, output_xml_name):
+     """
+     Check for missing Robot Framework output XML files in pabot result directories,
+     taking into account the optional timestamp added by the -T option.
+
+     Args:
+         base_dir: The root directory containing pabot subdirectories
+         command_name: Name of the command that generated the output (used for fallback stderr filename)
+         output_xml_name: Expected XML filename, e.g., 'output.xml'
+
+     Returns:
+         List of paths to stderr output files for directories where the XML is missing.
+     """
      missing = []
+     # Prepare regex to match timestamped filenames like output-YYYYMMDD-hhmmss.xml
+     name_stem = os.path.splitext(output_xml_name)[0]
+     name_suffix = os.path.splitext(output_xml_name)[1]
+     pattern = re.compile(rf"^{re.escape(name_stem)}(-\d{{8}}-\d{{6}})?{re.escape(name_suffix)}$")
+
      for root, dirs, _ in os.walk(base_dir):
          if root == base_dir:
              for subdir in dirs:
                  subdir_path = os.path.join(base_dir, subdir)
-                 has_xml = any(fname.endswith(output_xml_name) for fname in os.listdir(subdir_path))
+                 # Check if any file matches the expected XML name or timestamped variant
+                 has_xml = any(pattern.match(fname) for fname in os.listdir(subdir_path))
                  if not has_xml:
-                     command_name = command_name.replace(" ", "_")
-                     missing.append(os.path.join(subdir_path, f'{command_name}_stderr.out'))
-             break
+                     sanitized_cmd = command_name.replace(" ", "_")
+                     missing.append(os.path.join(subdir_path, f"{sanitized_cmd}_stderr.out"))
+             break # only check immediate subdirectories
      return missing


@@ -1630,10 +1731,25 @@ def _write_stats(stats):
      _write("===================================================")


+ def add_timestamp_to_filename(file_path: str, timestamp: str) -> str:
+     """
+     Rename the given file by inserting a timestamp before the extension.
+     Format: YYYYMMDD-hhmmss
+     Example: output.xml -> output-20251222-152233.xml
+     """
+     file_path = Path(file_path)
+     if not file_path.exists():
+         raise FileNotFoundError(f"{file_path} does not exist")
+
+     new_name = f"{file_path.stem}-{timestamp}{file_path.suffix}"
+     new_path = file_path.with_name(new_name)
+     file_path.rename(new_path)
+     return str(new_path)
+
+
  def _report_results_for_one_run(
      outs_dir, pabot_args, options, start_time_string, tests_root_name, stats
  ):
-     _write(pabot_args)
      copied_artifacts = _copy_output_artifacts(
          options, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]), pabot_args["artifacts"], pabot_args["artifactsinsubfolders"]
      )
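Note that add_timestamp_to_filename renames the file on disk and returns the new path, so callers have to keep using the returned value, as the timestampoutputs branch in the next hunk does. A small usage sketch with throwaway paths, assuming pabot 5.2.0b1 is installed so the helper above is importable:

# Usage sketch for the helper added above (paths are illustrative).
import datetime
import os
import tempfile

from pabot.pabot import add_timestamp_to_filename

out_dir = tempfile.mkdtemp()
xml_path = os.path.join(out_dir, "output.xml")
open(xml_path, "w").close()  # placeholder file so the rename has something to act on

ts = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
renamed = add_timestamp_to_filename(xml_path, ts)
print(renamed)                   # .../output-YYYYMMDD-hhmmss.xml
print(os.path.exists(xml_path))  # False: the original name was renamed away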
@@ -1641,6 +1757,9 @@ def _report_results_for_one_run(
          outs_dir, options, tests_root_name, stats, copied_artifacts, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"])
      )
      _write_stats(stats)
+     ts = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
+     if "timestampoutputs" in options and options["timestampoutputs"]:
+         output_path = add_timestamp_to_filename(output_path, ts)
      if (
          "report" in options
          and options["report"].upper() == "NONE"
@@ -1653,7 +1772,7 @@
      else:
          _write("Output: %s" % output_path)
          options["output"] = None # Do not write output again with rebot
-     return rebot(output_path, **_options_for_rebot(options, start_time_string, _now()))
+     return rebot(output_path, **_options_for_rebot(options, start_time_string, ts))


  def _merge_one_run(
@@ -1664,7 +1783,16 @@ def _merge_one_run(
          os.path.join(options.get("outputdir", "."), outputfile)
      )
      filename = options.get("output") or "output.xml"
-     files = natsorted(glob(os.path.join(_glob_escape(outs_dir), f"**/*{filename}"), recursive=True))
+     base_name, ext = os.path.splitext(filename)
+     # Glob all candidates
+     candidate_files = glob(os.path.join(outs_dir, "**", f"*{base_name}*{ext}"), recursive=True)
+
+     # Regex: basename or basename-YYYYMMDD-hhmmss.ext
+     ts_pattern = re.compile(rf"^{re.escape(base_name)}(?:-\d{{8}}-\d{{6}})?{re.escape(ext)}$")
+
+     files = [f for f in candidate_files if ts_pattern.search(os.path.basename(f))]
+     files = natsorted(files)
+
      if not files:
          _write('WARN: No output files in "%s"' % outs_dir, Color.YELLOW)
          return ""
@@ -1715,19 +1843,9 @@ def _glob_escape(pathname):
      return drive + pathname


- def _writer():
-     while True:
-         message = MESSAGE_QUEUE.get()
-         if message is None:
-             MESSAGE_QUEUE.task_done()
-             return
-         print(message)
-         sys.stdout.flush()
-         MESSAGE_QUEUE.task_done()
-
-
  def _write(message, color=None):
-     MESSAGE_QUEUE.put(_wrap_with(color, message))
+     writer = get_writer()
+     writer.write(message, color=color)


  def _wrap_with(color, message):
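pabot/writer.py, the source of the get_writer import at the top of this file, is also not part of this diff. From the call sites (get_writer() and get_writer(log_dir=...), write(message, color=...), flush(), stop()) the surface looks roughly like the stand-in below; everything about its internals (threading, log files, color handling) is an assumption made only to keep the call sites readable.

# Hypothetical stand-in mirroring the writer surface used in this file;
# the real pabot/writer.py may be implemented very differently.
import sys
import threading

_WRITER = None
_WRITER_LOCK = threading.Lock()


class _StdoutWriterSketch:
    def __init__(self, log_dir=None):
        self.log_dir = log_dir  # where a real writer might also persist logs

    def write(self, message, color=None):
        print(message)          # color handling omitted in this sketch
        sys.stdout.flush()

    def flush(self):
        sys.stdout.flush()

    def stop(self):
        pass                    # a threaded writer would join its worker here


def get_writer(log_dir=None):
    global _WRITER
    with _WRITER_LOCK:
        if _WRITER is None:
            _WRITER = _StdoutWriterSketch(log_dir=log_dir)
        return _WRITER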
@@ -1740,16 +1858,6 @@ def _is_output_coloring_supported():
      return sys.stdout.isatty() and os.name in Color.SUPPORTED_OSES


- def _start_message_writer():
-     t = threading.Thread(target=_writer)
-     t.start()
-
-
- def _stop_message_writer():
-     MESSAGE_QUEUE.put(None)
-     MESSAGE_QUEUE.join()
-
-
  def _is_port_available(port):
      """Check if a given port on localhost is available."""
      with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
@@ -1855,8 +1963,9 @@ class QueueItem(object):
          hive=None,
          processes=0,
          timeout=None,
+         skip=False,
      ):
-         # type: (List[str], str, Dict[str, object], ExecutionItem, List[str], bool, Tuple[str, Optional[str]], Optional[str], int, Optional[int]) -> None
+         # type: (List[str], str, Dict[str, object], ExecutionItem, List[str], bool, Tuple[str, Optional[str]], Optional[str], int, Optional[int], bool) -> None
          self.datasources = datasources
          self.outs_dir = (
              outs_dir.encode("utf-8") if PY2 and is_unicode(outs_dir) else outs_dir
@@ -1876,6 +1985,7 @@ class QueueItem(object):
          self.processes = processes
          self.timeout = timeout
          self.sleep_before_start = execution_item.get_sleep()
+         self.skip = skip

      @property
      def index(self):
@@ -2070,7 +2180,16 @@ def _get_dynamically_created_execution_items(
      if not _pabotlib_in_use():
          return None
      plib = Remote(_PABOTLIBURI)
-     new_suites = plib.run_keyword("get_added_suites", [], {})
+     try:
+         new_suites = plib.run_keyword("get_added_suites", [], {})
+     except RuntimeError as err:
+         _write(
+             "[WARN] PabotLib unreachable during post-run phase, "
+             "assuming no dynamically added suites. "
+             "Original error: %s",
+             err,
+         )
+         new_suites = []
      if len(new_suites) == 0:
          return None
      suite_group = [DynamicSuiteItem(s, v) for s, v in new_suites]
@@ -2102,6 +2221,7 @@ def main(args=None):

  def main_program(args):
      global _PABOTLIBPROCESS
+     outs_dir = None
      args = args or sys.argv[1:]
      if len(args) == 0:
          print(
@@ -2115,7 +2235,6 @@ def main_program(args):
      start_time_string = _now()
      # NOTE: timeout option
      try:
-         _start_message_writer()
          options, datasources, pabot_args, opts_for_run = parse_args(args)
          if pabot_args["help"]:
              help_print = __doc__.replace(
@@ -2123,15 +2242,22 @@ def main_program(args):
                  read_args_from_readme()
              )
              print(help_print.replace("[PABOT_VERSION]", PABOT_VERSION))
-             return 0
+             return 251
          if len(datasources) == 0:
              print("[ " + _wrap_with(Color.RED, "ERROR") + " ]: No datasources given.")
              print("Try --help for usage information.")
              return 252
+         outs_dir = _output_dir(options)
+
+         # These ensure MessageWriter and ProcessManager are ready before any parallel execution.
+         writer = get_writer(log_dir=outs_dir)
+         _ensure_process_manager()
+         _write(f"Initialized logging in {outs_dir}")
+
          _PABOTLIBPROCESS = _start_remote_library(pabot_args)
          if _pabotlib_in_use():
              _initialize_queue_index()
-         outs_dir = _output_dir(options)
+
          suite_groups = _group_suites(outs_dir, datasources, options, pabot_args)
          if pabot_args["verbose"]:
              _write("Suite names resolved in %s seconds" % str(time.time() - start_time))
@@ -2142,16 +2268,30 @@ def main_program(args):
          execution_items = _create_execution_items(
              suite_groups, datasources, outs_dir, options, opts_for_run, pabot_args
          )
-         while execution_items:
-             items = execution_items.pop(0)
-             _parallel_execute(
-                 items,
+         if pabot_args.get("ordering", {}).get("mode") == "dynamic":
+             # flatten stages
+             all_items = []
+             for stage in execution_items:
+                 all_items.extend(stage)
+             _parallel_execute_dynamic(
+                 all_items,
                  pabot_args["processes"],
                  datasources,
                  outs_dir,
                  opts_for_run,
                  pabot_args,
              )
+         else:
+             while execution_items:
+                 items = execution_items.pop(0)
+                 _parallel_execute(
+                     items,
+                     pabot_args["processes"],
+                     datasources,
+                     outs_dir,
+                     opts_for_run,
+                     pabot_args,
+                 )
          if pabot_args["no-rebot"]:
              _write((
                  "All tests were executed, but the --no-rebot argument was given, "
@@ -2159,7 +2299,7 @@ def main_program(args):
                  f"All results have been saved in the {outs_dir} folder."
              ))
              _write("===================================================")
-             return 0 if not _ABNORMAL_EXIT_HAPPENED else 252
+             return 253
          result_code = _report_results(
              outs_dir,
              pabot_args,
@@ -2172,24 +2312,62 @@ def main_program(args):
              version_print = __doc__.replace("\nPLACEHOLDER_README.MD\n", "")
              print(version_print.replace("[PABOT_VERSION]", PABOT_VERSION))
              print(i.message)
+             return 251
          except DataError as err:
              print(err.message)
              return 252
          except Exception:
-             _write("[ERROR] EXCEPTION RAISED DURING PABOT EXECUTION", Color.RED)
-             _write(
-                 "[ERROR] PLEASE CONSIDER REPORTING THIS ISSUE TO https://github.com/mkorpela/pabot/issues",
-                 Color.RED,
-             )
-             _write("Pabot: %s" % PABOT_VERSION)
-             _write("Python: %s" % sys.version)
-             _write("Robot Framework: %s" % ROBOT_VERSION)
-             raise
+             if not CTRL_C_PRESSED:
+                 _write("[ERROR] EXCEPTION RAISED DURING PABOT EXECUTION", Color.RED)
+                 _write(
+                     "[ERROR] PLEASE CONSIDER REPORTING THIS ISSUE TO https://github.com/mkorpela/pabot/issues",
+                     Color.RED,
+                 )
+                 _write("Pabot: %s" % PABOT_VERSION)
+                 _write("Python: %s" % sys.version)
+                 _write("Robot Framework: %s" % ROBOT_VERSION)
+                 import traceback
+                 traceback.print_exc()
+                 sys.exit(255)
+             else:
+                 _write("[ERROR] Execution stopped by user (Ctrl+C)", Color.RED)
+                 sys.exit(253)
          finally:
-             if _PABOTLIBPROCESS:
-                 _stop_remote_library(_PABOTLIBPROCESS)
-             _print_elapsed(start_time, time.time())
-             _stop_message_writer()
+             # Ensure that writer exists
+             writer = None
+             try:
+                 if outs_dir is not None:
+                     writer = get_writer(log_dir=outs_dir)
+             except Exception as e:
+                 print(f"[WARN] Could not initialize writer in finally: {e}")
+             # Try to stop remote library
+             try:
+                 if _PABOTLIBPROCESS:
+                     _stop_remote_library(_PABOTLIBPROCESS)
+             except Exception as e:
+                 if writer:
+                     writer.write(f"[WARN] Failed to stop remote library cleanly: {e}", Color.YELLOW)
+                 else:
+                     print(f"[WARN] Failed to stop remote library cleanly: {e}")
+             # print elapsed time
+             try:
+                 _print_elapsed(start_time, time.time())
+             except Exception as e:
+                 if writer:
+                     writer.write(f"[WARN] Failed to print elapsed time: {e}", Color.YELLOW)
+                 else:
+                     print(f"[WARN] Failed to print elapsed time: {e}")
+             # Flush and stop writer
+             if writer:
+                 try:
+                     writer.flush()
+                     writer.write("Logs flushed successfully.")
+                 except Exception as e:
+                     print(f"[WARN] Could not flush writer: {e}")
+                 try:
+                     writer.stop()
+                 except Exception as e:
+                     print(f"[WARN] Could not stop writer: {e}")


  def _parse_ordering(filename): # type: (str) -> List[ExecutionItem]
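Taken together with the earlier main_program hunks, this reshuffles the process exit codes pabot reports. Summarising only the values visible in this diff (other exit paths may exist elsewhere in the file):

# Exit codes as they appear in the hunks of this diff (not an exhaustive list).
EXIT_CODES_SEEN_IN_DIFF = {
    251: "help or version text printed",
    252: "no datasources given, or DataError",
    253: "--no-rebot run finished, or execution stopped with Ctrl+C",
    255: "unexpected exception during execution",
}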
@@ -2237,7 +2415,7 @@ def _check_ordering(ordering_file, suite_names): # type: (List[ExecutionItem],
  def _group_suites(outs_dir, datasources, options, pabot_args):
      suite_names = solve_suite_names(outs_dir, datasources, options, pabot_args)
      _verify_depends(suite_names)
-     ordering_arg = _parse_ordering(pabot_args.get("ordering")) if (pabot_args.get("ordering")) is not None else None
+     ordering_arg = _parse_ordering(pabot_args.get("ordering").get("file")) if (pabot_args.get("ordering")) is not None else None
      if ordering_arg:
          _verify_depends(ordering_arg)
      if options.get("name"):
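Both this hunk and the dynamic-mode dispatch in main_program read pabot_args["ordering"] as a mapping rather than the plain file path used in 5.1.0. The parsing itself happens in pabot's argument handling, which is not part of this diff, so the exact structure is an inference; the accesses in this file imply something like the following (the file name is illustrative only):

# Shape implied by the accesses in this file; the actual structure is built
# in pabot's argument parsing, which is not shown in this diff.
pabot_args = {
    "processes": 4,
    "ordering": {
        "file": "order.dat",       # consumed by _parse_ordering via _group_suites
        "mode": "dynamic",         # "dynamic" selects _parallel_execute_dynamic
        "failure_policy": "skip",  # "skip" propagates failures to dependents; default is "run_all"
    },
}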