robotframework-pabot 5.2.0rc1__py3-none-any.whl → 5.2.0rc2__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
pabot/__init__.py CHANGED
@@ -7,4 +7,4 @@ try:
  except ImportError:
      pass
 
- __version__ = "5.2.0rc1"
+ __version__ = "5.2.0rc2"
pabot/arguments.py CHANGED
@@ -236,7 +236,12 @@ def _parse_pabot_args(args): # type: (List[str]) -> Tuple[List[str], Dict[str,
          if arg_name == "command":
              try:
                  end_index = args.index("--end-command", i)
-                 pabot_args["command"] = args[i + 1 : end_index]
+                 pabot_args["use_user_command"] = True
+                 cmd_lines = args[i + 1 : end_index]
+                 cmd = []
+                 for line in cmd_lines:
+                     cmd.extend(line.split())
+                 pabot_args["command"] = cmd
                  i = end_index + 1
                  continue
              except ValueError:
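The `--command ... --end-command` block is pabot's existing way of wrapping a custom executor around Robot Framework; what changes here is that every token between the two markers is now additionally split on whitespace and a `use_user_command` flag is recorded. A minimal, standalone sketch of that splitting behaviour (illustrative values, not pabot's full parser loop):

    # Illustrative only: a shell-quoted command string is now broken into separate
    # command parts before being stored as pabot_args["command"].
    args = ["--command", "java -jar robotframework.jar", "--end-command", "tests"]

    i = args.index("--command")
    end_index = args.index("--end-command", i)
    cmd = []
    for line in args[i + 1 : end_index]:
        cmd.extend(line.split())

    print(cmd)  # ['java', '-jar', 'robotframework.jar']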
pabot/pabot.py CHANGED
@@ -108,9 +108,11 @@ CTRL_C_PRESSED = False
  _PABOTLIBURI = "127.0.0.1:8270"
  _PABOTLIBPROCESS = None # type: Optional[subprocess.Popen]
  _PABOTWRITER = None # type: Optional[MessageWriter]
+ _PABOTLIBTHREAD = None # type: Optional[threading.Thread]
  _NUMBER_OF_ITEMS_TO_BE_EXECUTED = 0
  _ABNORMAL_EXIT_HAPPENED = False
  _PABOTCONSOLE = "verbose" # type: str
+ _USE_USER_COMMAND = False
 
  _COMPLETED_LOCK = threading.Lock()
  _NOT_COMPLETED_INDEXES = [] # type: List[int]
@@ -211,15 +213,18 @@ def extract_section(lines, start_marker="<!-- START DOCSTRING -->", end_marker="
          if end_marker in line:
              break
          if inside_section:
-             # Remove Markdown hyperlinks but keep text
-             line = re.sub(r'\[([^\]]+)\]\(https?://[^\)]+\)', r'\1', line)
-             # Remove Markdown section links but keep text
-             line = re.sub(r'\[([^\]]+)\]\(#[^\)]+\)', r'\1', line)
-             # Remove ** and backticks `
-             line = re.sub(r'(\*\*|`)', '', line)
              extracted_lines.append(line)
 
-     return "".join(extracted_lines).strip()
+     result = "".join(extracted_lines)
+
+     # Remove Markdown hyperlinks but keep text
+     result = re.sub(r'\[([^\]]+)\]\(https?://[^\)]+\)', r'\1', result)
+     # Remove Markdown section links but keep text
+     result = re.sub(r'\[([^\]]+)\]\(#[^\)]+\)', r'\1', result)
+     # Remove ** and backticks `
+     result = re.sub(r'(\*\*|`)', '', result)
+
+     return result.strip()
 
 
  class Color:
@@ -368,45 +373,63 @@ def _try_execute_and_wait(
      is_ignored = False
      if _pabotlib_in_use():
          plib = Remote(_PABOTLIBURI)
+
+     command_name = _get_command_name(run_cmd[0])
+     stdout_path = os.path.join(outs_dir, f"{command_name}_stdout.out")
+     stderr_path = os.path.join(outs_dir, f"{command_name}_stderr.out")
+
      try:
-         with open(os.path.join(outs_dir, run_cmd[-1] + "_stdout.out"), "w") as stdout:
-             with open(os.path.join(outs_dir, run_cmd[-1] + "_stderr.out"), "w") as stderr:
-                 process, (rc, elapsed) = _run(
-                     run_cmd,
-                     run_options,
-                     stderr,
-                     stdout,
-                     item_name,
-                     verbose,
-                     pool_id,
-                     my_index,
-                     outs_dir,
-                     process_timeout,
-                     sleep_before_start
-                 )
+         with open(stdout_path, "w", encoding="utf-8", buffering=1) as stdout, \
+              open(stderr_path, "w", encoding="utf-8", buffering=1) as stderr:
+
+             process, (rc, elapsed) = _run(
+                 run_cmd,
+                 run_options,
+                 stderr,
+                 stdout,
+                 item_name,
+                 verbose,
+                 pool_id,
+                 my_index,
+                 outs_dir,
+                 process_timeout,
+                 sleep_before_start
+             )
+
+             # Ensure writing
+             stdout.flush()
+             stderr.flush()
+             os.fsync(stdout.fileno())
+             os.fsync(stderr.fileno())
+
+         if plib:
+             _increase_completed(plib, my_index)
+             is_ignored = _is_ignored(plib, caller_id)
+
+         # Thread-safe list append
+         _ALL_ELAPSED.append(elapsed)
+
+         _result_to_stdout(
+             elapsed=elapsed,
+             is_ignored=is_ignored,
+             item_name=item_name,
+             my_index=my_index,
+             pool_id=pool_id,
+             process=process,
+             rc=rc,
+             stderr=stderr_path,
+             stdout=stdout_path,
+             verbose=verbose,
+             show_stdout_on_failure=show_stdout_on_failure,
+         )
+
+         if is_ignored and os.path.isdir(outs_dir):
+             _rmtree_with_path(outs_dir)
+         return rc
+
      except:
          _write(traceback.format_exc(), level="error")
-     if plib:
-         _increase_completed(plib, my_index)
-         is_ignored = _is_ignored(plib, caller_id)
-     # Thread-safe list append
-     _ALL_ELAPSED.append(elapsed)
-     _result_to_stdout(
-         elapsed,
-         is_ignored,
-         item_name,
-         my_index,
-         pool_id,
-         process,
-         rc,
-         stderr,
-         stdout,
-         verbose,
-         show_stdout_on_failure,
-     )
-     if is_ignored and os.path.isdir(outs_dir):
-         _rmtree_with_path(outs_dir)
-     return rc
+         return 252
 
 
  def _result_to_stdout(
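The rewritten block above opens the per-subprocess stdout/stderr files line-buffered and explicitly flushes and fsyncs them before the result is reported. A small standalone sketch of that flush-then-fsync pattern (generic file name, not pabot's actual paths):

    import os

    # flush() empties Python's userspace buffer into the OS; os.fsync() asks the OS
    # to commit the data to disk, so a later reader sees the complete output.
    with open("example_stdout.out", "w", encoding="utf-8", buffering=1) as handle:
        handle.write("subprocess output line\n")
        handle.flush()
        os.fsync(handle.fileno())

    with open("example_stdout.out", "r", encoding="utf-8") as handle:
        print(handle.read())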
@@ -603,7 +626,7 @@ def _run(
          _write(f"{timestamp} [{pool_id}] [ID:{item_index}] SLEEPING {sleep_before_start} SECONDS BEFORE STARTING {item_name}")
          time.sleep(sleep_before_start)
 
-     command_name = run_command[-1].replace(" ", "_")
+     command_name = _get_command_name(run_command[0])
      argfile_path = os.path.join(outs_dir, f"{command_name}_argfile.txt")
      _write_internal_argument_file(run_options, filename=argfile_path)
 
@@ -634,11 +657,11 @@ def _run(
 
  def _read_file(file_handle):
      try:
-         with open(file_handle.name, "r") as content_file:
+         with open(file_handle, "r") as content_file:
              content = content_file.read()
              return content
-     except:
-         return "Unable to read file %s" % file_handle
+     except Exception as e:
+         return "Unable to read file %s, error: %s" % (os.path.abspath(file_handle), e)
 
 
  def _execution_failed_message(suite_name, stdout, stderr, rc, verbose):
@@ -1262,6 +1285,7 @@ def generate_suite_names_with_builder(outs_dir, datasources, options):
      if ROBOT_VERSION >= "6.1":
          builder = TestSuiteBuilder(
              included_extensions=settings.extension,
+             included_files=settings.parse_include,
              rpa=settings.rpa,
              lang=opts.get("language"),
          )
@@ -1359,7 +1383,7 @@ def _options_for_dryrun(options, outs_dir):
      return _set_terminal_coloring_options(options)
 
 
- def _options_for_rebot(options, start_time_string, end_time_string):
+ def _options_for_rebot(options, start_time_string, end_time_string, num_of_executions=0):
      rebot_options = options.copy()
      rebot_options["starttime"] = start_time_string
      rebot_options["endtime"] = end_time_string
@@ -1368,6 +1392,12 @@ def _options_for_rebot(options, start_time_string, end_time_string):
      rebot_options["test"] = []
      rebot_options["exclude"] = []
      rebot_options["include"] = []
+     rebot_options["metadata"].append(
+         f"Pabot Info:[https://pabot.org/?ref=log|Pabot] result from {num_of_executions} executions."
+     )
+     rebot_options["metadata"].append(
+         f"Pabot Version:{PABOT_VERSION}"
+     )
      if rebot_options.get("runemptysuite"):
          rebot_options["processemptysuite"] = True
      if ROBOT_VERSION >= "2.8":
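For reference, list-valued `metadata` entries behave like repeated `--metadata name:value` options when handed to rebot, so the merged report ends up with both a "Pabot Info" and a "Pabot Version" entry. A hedged sketch of the equivalent direct rebot call (illustrative values, not pabot's actual call site; assumes an existing output.xml):

    from robot import rebot

    # Sketch only: the execution count and version string are placeholders.
    rebot(
        "output.xml",
        metadata=[
            "Pabot Info:[https://pabot.org/?ref=log|Pabot] result from 4 executions.",
            "Pabot Version:5.2.0rc2",
        ],
    )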
@@ -1727,7 +1757,7 @@ def _copy_output_artifacts(options, timestamp_id=None, file_extensions=None, inc
      return copied_artifacts
 
 
- def _check_pabot_results_for_missing_xml(base_dir, command_name, output_xml_name):
+ def _check_pabot_results_for_missing_xml(base_dir, command_name, output_xml_name='output.xml'):
      """
      Check for missing Robot Framework output XML files in pabot result directories,
      taking into account the optional timestamp added by the -T option.
@@ -1753,14 +1783,18 @@ def _check_pabot_results_for_missing_xml(base_dir, command_name, output_xml_name
          # Check if any file matches the expected XML name or timestamped variant
          has_xml = any(pattern.match(fname) for fname in os.listdir(subdir_path))
          if not has_xml:
-             sanitized_cmd = command_name.replace(" ", "_")
+             sanitized_cmd = _get_command_name(command_name)
              missing.append(os.path.join(subdir_path, f"{sanitized_cmd}_stderr.out"))
          break # only check immediate subdirectories
      return missing
 
 
+ def _get_command_name(command_name):
+     global _USE_USER_COMMAND
+     return "user_command" if _USE_USER_COMMAND else command_name
+
+
  def _report_results(outs_dir, pabot_args, options, start_time_string, tests_root_name):
-     output_xml_name = options.get("output") or "output.xml"
      if "pythonpath" in options:
          del options["pythonpath"]
      if ROBOT_VERSION < "4.0":
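The new `_get_command_name` helper is what keeps the per-process artifact names (`*_stdout.out`, `*_stderr.out`, `*_argfile.txt`) predictable: when a user-supplied `--command` is active they are grouped under a fixed `user_command` prefix instead of being derived from the custom command itself. A small standalone illustration of that behaviour (the module-level flag mirrors `_USE_USER_COMMAND`; values are examples):

    _USE_USER_COMMAND = False

    def _get_command_name(command_name):
        return "user_command" if _USE_USER_COMMAND else command_name

    print(_get_command_name("robot"))  # robot
    _USE_USER_COMMAND = True
    print(_get_command_name("robot"))  # user_command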
@@ -1778,33 +1812,34 @@ def _report_results(outs_dir, pabot_args, options, start_time_string, tests_root
      missing_outputs = []
      if pabot_args["argumentfiles"]:
          outputs = [] # type: List[str]
+         total_num_of_executions = 0
          for index, _ in pabot_args["argumentfiles"]:
              copied_artifacts = _copy_output_artifacts(
                  options, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]), pabot_args["artifacts"], pabot_args["artifactsinsubfolders"], index
              )
-             outputs += [
-                 _merge_one_run(
-                     os.path.join(outs_dir, index),
-                     options,
-                     tests_root_name,
-                     stats,
-                     copied_artifacts,
-                     timestamp_id=_get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]),
-                     outputfile=os.path.join("pabot_results", "output%s.xml" % index),
-                 )
-             ]
-             missing_outputs.extend(_check_pabot_results_for_missing_xml(os.path.join(outs_dir, index), pabot_args.get('command')[-1], output_xml_name))
+             output, num_of_executions = _merge_one_run(
+                 os.path.join(outs_dir, index),
+                 options,
+                 tests_root_name,
+                 stats,
+                 copied_artifacts,
+                 timestamp_id=_get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]),
+                 outputfile=os.path.join("pabot_results", "output%s.xml" % index),
+             )
+             outputs += [output]
+             total_num_of_executions += num_of_executions
+             missing_outputs.extend(_check_pabot_results_for_missing_xml(os.path.join(outs_dir, index), pabot_args.get('command')))
          if "output" not in options:
              options["output"] = "output.xml"
          _write_stats(stats)
          stdout_writer = get_stdout_writer()
          stderr_writer = get_stderr_writer(original_stderr_name='Internal Rebot')
-         exit_code = rebot(*outputs, **_options_for_rebot(options, start_time_string, _now()), stdout=stdout_writer, stderr=stderr_writer)
+         exit_code = rebot(*outputs, **_options_for_rebot(options, start_time_string, _now(), total_num_of_executions), stdout=stdout_writer, stderr=stderr_writer)
      else:
          exit_code = _report_results_for_one_run(
              outs_dir, pabot_args, options, start_time_string, tests_root_name, stats
          )
-         missing_outputs.extend(_check_pabot_results_for_missing_xml(outs_dir, pabot_args.get('command')[-1], output_xml_name))
+         missing_outputs.extend(_check_pabot_results_for_missing_xml(outs_dir, pabot_args.get('command')))
      if missing_outputs:
          _write(("[ " + _wrap_with(Color.YELLOW, 'WARNING') + " ] "
                  "One or more subprocesses encountered an error and the "
@@ -1860,7 +1895,7 @@ def _report_results_for_one_run(
      copied_artifacts = _copy_output_artifacts(
          options, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]), pabot_args["artifacts"], pabot_args["artifactsinsubfolders"]
      )
-     output_path = _merge_one_run(
+     output_path, num_of_executions = _merge_one_run(
          outs_dir, options, tests_root_name, stats, copied_artifacts, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"])
      )
      _write_stats(stats)
@@ -1881,7 +1916,7 @@ def _report_results_for_one_run(
      options["output"] = None # Do not write output again with rebot
      stdout_writer = get_stdout_writer()
      stderr_writer = get_stderr_writer(original_stderr_name="Internal Rebot")
-     exit_code = rebot(output_path, **_options_for_rebot(options, start_time_string, ts), stdout=stdout_writer, stderr=stderr_writer)
+     exit_code = rebot(output_path, **_options_for_rebot(options, start_time_string, ts, num_of_executions), stdout=stdout_writer, stderr=stderr_writer)
      return exit_code
 
 
@@ -1892,7 +1927,7 @@ def _merge_one_run(
      output_path = os.path.abspath(
          os.path.join(options.get("outputdir", "."), outputfile)
      )
-     filename = options.get("output") or "output.xml"
+     filename = "output.xml"
      base_name, ext = os.path.splitext(filename)
      # Glob all candidates
      candidate_files = glob(os.path.join(outs_dir, "**", f"*{base_name}*{ext}"), recursive=True)
@@ -1905,7 +1940,7 @@ def _merge_one_run(
 
      if not files:
          _write('[ WARNING ]: No output files in "%s"' % outs_dir, Color.YELLOW, level="warning")
-         return ""
+         return "", 0
 
      def invalid_xml_callback():
          global _ABNORMAL_EXIT_HAPPENED
@@ -1921,7 +1956,7 @@ def _merge_one_run(
          resu.save(output_path, legacy_output=True)
      else:
          resu.save(output_path)
-     return output_path
+     return output_path, len(files)
 
 
  def _update_stats(result, stats):
@@ -1987,11 +2022,11 @@ def _get_free_port():
      return s.getsockname()[1]
 
 
- def _start_remote_library(pabot_args): # type: (dict) -> Optional[subprocess.Popen]
+ def _start_remote_library(pabot_args): # type: (dict) -> Optional[Tuple[subprocess.Popen, threading.Thread]]
      global _PABOTLIBURI
      # If pabotlib is not enabled, do nothing
      if not pabot_args.get("pabotlib"):
-         return None
+         return None, None
 
      host = pabot_args.get("pabotlibhost", "127.0.0.1")
      port = pabot_args.get("pabotlibport", 8270)
@@ -2045,11 +2080,20 @@ def _start_remote_library(pabot_args): # type: (dict) -> Optional[subprocess.Po
      process = subprocess.Popen(cmd, **kwargs)
 
      def _read_output(proc, writer):
-         for line in proc.stdout:
-             if line.strip(): # Skip empty lines
-                 writer.write(line.rstrip('\n') + '\n', level="info")
-                 writer.flush()
-         proc.stdout.close()
+         try:
+             for line in proc.stdout:
+                 if line.strip(): # Skip empty lines
+                     try:
+                         writer.write(line.rstrip('\n') + '\n', level="info")
+                         writer.flush()
+                     except (RuntimeError, ValueError):
+                         # Writer/stdout already closed during shutdown
+                         break
+         finally:
+             try:
+                 proc.stdout.close()
+             except Exception:
+                 pass
 
      pabotlib_writer = ThreadSafeWriter(get_writer())
      thread = threading.Thread(
@@ -2059,7 +2103,7 @@ def _start_remote_library(pabot_args): # type: (dict) -> Optional[subprocess.Po
      )
      thread.start()
 
-     return process
+     return process, thread
 
 
  def _stop_remote_library(process): # type: (subprocess.Popen) -> None
@@ -2126,6 +2170,7 @@ class QueueItem(object):
              outs_dir.encode("utf-8") if PY2 and is_unicode(outs_dir) else outs_dir
          )
          self.options = options
+         self.options["output"] = "output.xml" # This is hardcoded output.xml inside pabot_results, not the final output
          self.execution_item = (
              execution_item if not hive else HivedItem(execution_item, hive)
          )
@@ -2366,7 +2411,7 @@ def main(args=None):
 
 
  def main_program(args):
-     global _PABOTLIBPROCESS, _PABOTCONSOLE, _PABOTWRITER
+     global _PABOTLIBPROCESS, _PABOTCONSOLE, _PABOTWRITER, _PABOTLIBTHREAD, _USE_USER_COMMAND
      outs_dir = None
      args = args or sys.argv[1:]
      if len(args) == 0:
@@ -2383,13 +2428,14 @@ def main_program(args):
      original_signal_handler = signal.default_int_handler # Save default handler in case of early exit
      try:
          options, datasources, pabot_args, opts_for_run = parse_args(args)
+         _USE_USER_COMMAND = pabot_args.get("use_user_command", False)
          _PABOTCONSOLE = pabot_args.get("pabotconsole", "verbose")
          if pabot_args["help"]:
              help_print = __doc__.replace(
                  "PLACEHOLDER_README.MD",
                  read_args_from_readme()
              )
-             print(help_print.replace("[PABOT_VERSION]", PABOT_VERSION))
+             print(help_print.replace("[PABOT_VERSION]", PABOT_VERSION, 1))
              return 251
          if len(datasources) == 0:
              print("[ " + _wrap_with(Color.RED, "ERROR") + " ]: No datasources given.")
@@ -2402,7 +2448,7 @@ def main_program(args):
          _ensure_process_manager()
          _write(f"Initialized logging in {outs_dir}", level="info")
 
-         _PABOTLIBPROCESS = _start_remote_library(pabot_args)
+         _PABOTLIBPROCESS, _PABOTLIBTHREAD = _start_remote_library(pabot_args)
          # Set up signal handler to keep PabotLib alive during CTRL+C
          # This ensures graceful shutdown in the finally block
          original_signal_handler = signal.signal(signal.SIGINT, keyboard_interrupt)
@@ -2553,12 +2599,31 @@ def main_program(args):
                  _write(f"[ WARNING ] Failed to print elapsed time: {e}", Color.YELLOW, level="warning")
              else:
                  print(f"[ WARNING ] Failed to print elapsed time: {e}")
-
+
+         # Ensure pabotlib output reader thread has finished
+         try:
+             if _PABOTLIBTHREAD:
+                 _PABOTLIBTHREAD.join(timeout=5)
+                 if _PABOTLIBTHREAD.is_alive():
+                     if _PABOTWRITER:
+                         _write(
+                             "[ WARNING ] PabotLib output thread did not finish before timeout",
+                             Color.YELLOW,
+                             level="warning"
+                         )
+                     else:
+                         print("[ WARNING ] PabotLib output thread did not finish before timeout")
+         except Exception as e:
+             if _PABOTWRITER:
+                 _write(f"[ WARNING ] Could not join pabotlib output thread: {e}", Color.YELLOW, level="warning")
+             else:
+                 print(f"[ WARNING ] Could not join pabotlib output thread: {e}")
+
          # Flush and stop writer
          try:
              if _PABOTWRITER:
-                 _PABOTWRITER.flush()
                  _PABOTWRITER.write("Logs flushed successfully.", level="debug")
+                 _PABOTWRITER.flush()
              else:
                  writer = get_writer()
                  if writer:
pabot/result_merger.py CHANGED
@@ -290,12 +290,6 @@ def merge(
          rebot_options.get('legacyoutput')
      )
      if len(merged) == 1:
-         if not merged[0].suite.doc:
-             merged[
-                 0
-             ].suite.doc = "[https://pabot.org/?ref=log|Pabot] result from %d executions." % len(
-                 result_files
-             )
          return merged[0]
      else:
          return ResultsCombiner(merged)
pabot/writer.py CHANGED
@@ -51,14 +51,21 @@ class BufferingWriter:
              if line: # Only write non-empty lines
                  if self.original_stderr_name:
                      line = f"From {self.original_stderr_name}: {line}"
-                 self._writer.write(line, level=self._level)
+                 try:
+                     self._writer.write(line, level=self._level)
+                 except (RuntimeError, ValueError):
+                     # Writer/stdout already closed
+                     break
 
          # If buffer ends with partial content (no newline), keep it buffered
 
      def flush(self):
          with self._lock:
              if self._buffer:
-                 self._writer.write(self._buffer, level=self._level)
+                 try:
+                     self._writer.write(self._buffer, level=self._level)
+                 except (RuntimeError, ValueError):
+                     pass
              self._buffer = ""
 
 
@@ -81,7 +88,7 @@ class ThreadSafeWriter:
 
  class MessageWriter:
      def __init__(self, log_file=None, console_type="verbose"):
-         self.queue = queue.Queue()
+         self.queue = queue.Queue(maxsize=10000)
          self.log_file = log_file
          self.console_type = console_type
          self.console = DottedConsole() if console_type == "dotted" else None
@@ -89,7 +96,7 @@ class MessageWriter:
              os.makedirs(os.path.dirname(log_file), exist_ok=True)
          self._stop_event = threading.Event()
          self.thread = threading.Thread(target=self._writer)
-         self.thread.daemon = True
+         self.thread.daemon = False
          self.thread.start()
 
      def _is_output_coloring_supported(self):
@@ -127,78 +134,102 @@ class MessageWriter:
          # verbose mode - print everything
          return True
 
-     def _writer(self):
-         while not self._stop_event.is_set():
-             try:
-                 message, color, level = self.queue.get(timeout=0.1)
-             except queue.Empty:
-                 continue
-             if message is None:
-                 self.queue.task_done()
-                 break
+     def _flush_batch(self, batch, log_f):
+         for message, color, level in batch:
+             # Log file
+             if log_f:
+                 lvl = f"[{level.split('_')[0].upper()}]".ljust(9)
+                 log_f.write(f"{lvl} {message}\n")
 
-             message = message.rstrip("\n")
-             # Always write to log file
-             if self.log_file:
-                 with open(self.log_file, "a", encoding="utf-8") as f:
-                     lvl_msg = f"[{level.split('_')[0].upper()}]".ljust(9)
-                     f.write(f"{lvl_msg} {message}\n")
-
-             # Print to console based on level
+             # Console
              if self._should_print_to_console(level=level):
-                 if self.console is not None:
-                     # In dotted mode, only print single character messages directly
+                 if self.console:
                      if level == "info_passed":
-                         self.console.dot(self._wrap_with(color, "."))
+                         self.console.dot(self._wrap_with(color, "."))
                      elif level == "info_failed":
                          self.console.dot(self._wrap_with(color, "F"))
                      elif level in ("info_ignored", "info_skipped"):
                          self.console.dot(self._wrap_with(color, "s"))
                      else:
                          self.console.newline()
-                         print(self._wrap_with(color, message), flush=True)
+                         print(self._wrap_with(color, message))
                  else:
-                     print(self._wrap_with(color, message), flush=True)
-
-             self.queue.task_done()
-
-     def write(self, message, color=None, level="info"):
-         self.queue.put((f"{message}", color, level))
+                     print(self._wrap_with(color, message))
+
+     def _writer(self):
+         log_f = None
+         try:
+             if self.log_file:
+                 log_f = open(self.log_file, "a", encoding="utf-8", buffering=1)
 
-     def flush(self, timeout=5):
-         """
-         Wait until all queued messages have been written.
+             buffer = []
+             last_flush = time.time()
+             FLUSH_INTERVAL = 0.2 # secs
+             BATCH_SIZE = 50 # rows
 
-         :param timeout: Optional timeout in seconds. If None, wait indefinitely.
-         :return: True if queue drained before timeout (or no timeout), False if timed out.
-         """
-         start = time.time()
-         try:
-             # Loop until Queue reports no unfinished tasks
              while True:
-                 # If writer thread died, break to avoid infinite loop
-                 if not self.thread.is_alive():
-                     # Give one last moment for potential in-flight task_done()
-                     time.sleep(0.01)
-                     # If still unfinished, we can't do more
-                     return getattr(self.queue, "unfinished_tasks", 0) == 0
-
-                 unfinished = getattr(self.queue, "unfinished_tasks", None)
-                 if unfinished is None:
-                     # Fallback: call join once and return
-                     try:
-                         self.queue.join()
-                         return True
-                     except Exception:
-                         return False
+                 try:
+                     item = self.queue.get(timeout=0.1)
+                 except queue.Empty:
+                     # Timebased flush
+                     if buffer and (time.time() - last_flush) > FLUSH_INTERVAL:
+                         self._flush_batch(buffer, log_f)
+                         buffer.clear()
+                         last_flush = time.time()
+                     if self._stop_event.is_set():
+                         break
+                     continue
+
+                 message, color, level = item
+
+                 if message is None:
+                     self.queue.task_done()
+                     break
+
+                 message = message.rstrip("\n")
+
+                 buffer.append((message, color, level))
+                 self.queue.task_done()
 
-                 if unfinished == 0:
-                     return True
+                 # Batch full → flush
+                 if len(buffer) >= BATCH_SIZE:
+                     self._flush_batch(buffer, log_f)
+                     buffer.clear()
+                     last_flush = time.time()
+
+             # Final flush
+             if buffer:
+                 self._flush_batch(buffer, log_f)
 
-                 if timeout is not None and (time.time() - start) > timeout:
-                     return False
+         except Exception:
+             import traceback
+             traceback.print_exc()
 
+         finally:
+             if log_f:
+                 try:
+                     log_f.flush()
+                     log_f.close()
+                 except Exception:
+                     pass
+
+
+     def write(self, message, color=None, level="info"):
+         if self._stop_event.is_set():
+             return
+         try:
+             self.queue.put((f"{message}", color, level), timeout=0.1)
+         except queue.Full:
+             pass # drop
+
+     def flush(self, timeout=5):
+         end = time.time() + timeout
+         try:
+             while time.time() < end:
+                 if self.queue.unfinished_tasks == 0:
+                     return True
                  time.sleep(0.05)
+             return False
          except KeyboardInterrupt:
              # Allow tests/cli to interrupt flushing
              return False
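The rewritten `_writer` thread drains a bounded queue in batches: it flushes whenever 50 messages have accumulated or roughly 0.2 s have passed, and a `None` sentinel (or the stop event) ends the loop. A condensed, standalone sketch of that batch-and-flush consumer pattern (simplified names, not the MessageWriter class itself):

    import queue
    import threading
    import time

    def batching_consumer(q, handle_batch, stop_event, batch_size=50, flush_interval=0.2):
        """Drain q in batches; flush when the batch is full or flush_interval has passed."""
        buffer, last_flush = [], time.time()
        while True:
            try:
                item = q.get(timeout=0.1)
            except queue.Empty:
                if buffer and (time.time() - last_flush) > flush_interval:
                    handle_batch(buffer)
                    buffer, last_flush = [], time.time()
                if stop_event.is_set():
                    break
                continue
            if item is None:          # sentinel, mirrors MessageWriter.stop()
                q.task_done()
                break
            buffer.append(item)
            q.task_done()
            if len(buffer) >= batch_size:
                handle_batch(buffer)
                buffer, last_flush = [], time.time()
        if buffer:                    # final flush on shutdown
            handle_batch(buffer)

    q = queue.Queue(maxsize=10000)
    stop = threading.Event()
    worker = threading.Thread(target=batching_consumer, args=(q, print, stop))
    worker.start()
    for i in range(5):
        q.put(f"message {i}")
    q.put(None)
    worker.join()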
@@ -207,10 +238,12 @@ class MessageWriter:
          """
          Gracefully stop the writer thread and flush remaining messages.
          """
-         self.flush()
          self._stop_event.set()
-         self.queue.put((None, None, None)) # sentinel to break thread loop
-         self.thread.join(timeout=1.0)
+         try:
+             self.queue.put_nowait((None, None, None))
+         except queue.Full:
+             pass
+         self.thread.join(timeout=2)
 
 
  _writer_instance = None
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: robotframework-pabot
- Version: 5.2.0rc1
+ Version: 5.2.0rc2
  Summary: Parallel test runner for Robot Framework
  Home-page: https://pabot.org
  Download-URL: https://pypi.python.org/pypi/robotframework-pabot
@@ -194,7 +194,7 @@ Supports all [Robot Framework command line options](https://robotframework.org/r
 
  **--resourcefile [FILEPATH]**
  Indicator for a file that can contain shared variables for distributing resources. This needs to be used together with
- pabotlib option. Resource file syntax is same as Windows ini files. Where a section is a shared set of variables.
+ pabotlib option. Resource file syntax is same as Windows ini files where a section is a shared set of variables.
 
  **--argumentfile[INTEGER] [FILEPATH]**
  Run same suites with multiple [argumentfile](http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#argument-files) options.
@@ -1,23 +1,23 @@
  pabot/ProcessManager.py,sha256=Y4SUOLJ-AmQCc1Y49IYjZS34uqRUnlDt-G2AGymAdHg,13627
  pabot/SharedLibrary.py,sha256=mIipGs3ZhKYEakKprcbrMI4P_Un6qI8gE7086xpHaLY,2552
- pabot/__init__.py,sha256=3MzL6YP6ocsJT8YWQWOMv3XsyC6HvKBZkA07ZtDHD2s,203
- pabot/arguments.py,sha256=IBxkqa63hz5RvdnSZxhLjykkuMs91q8AF30QOzmRl_U,12034
+ pabot/__init__.py,sha256=Q8E5VA1T7yYCUTZrCg3pEtcKl4AAA1EhUA-PRA3RnRk,203
+ pabot/arguments.py,sha256=UAJyCb3F1--4qpYTuG6SL7rafPC3lLtQIEr9zJ0JNwU,12229
  pabot/clientwrapper.py,sha256=yz7battGs0exysnDeLDWJuzpb2Q-qSjitwxZMO2TlJw,231
  pabot/coordinatorwrapper.py,sha256=nQQ7IowD6c246y8y9nsx0HZbt8vS2XODhPVDjm-lyi0,195
  pabot/execution_items.py,sha256=zDVGW0AAeVbM-scC3Yui2TxvIPx1wYyFKHTPU2BkJkY,13329
- pabot/pabot.py,sha256=rF20VvPfcsPO2_d9FyFGwmoRRqfpoS2EFuATMblOHsc,97383
+ pabot/pabot.py,sha256=oHIvEHz1l27IIFjoAcU9bQw9Tf-m6o_v_mjAnTWKxKw,99944
  pabot/pabotlib.py,sha256=vHbqV7L7mIvDzXBh9UcdULrwhBHNn70EDXF_31MNFO4,22320
- pabot/result_merger.py,sha256=rRRSkQa6bdallwT4w9-jHJXvv7X866C1NwD0jdWdSaE,10177
+ pabot/result_merger.py,sha256=eqsF5FBqU_7N1Ti34nD2mrjVAr0uWTk67Ix582vCOaE,9963
  pabot/robotremoteserver.py,sha256=BdeIni9Q4LJKVDBUlG2uJ9tiyAjrPXwU_YsPq1THWoo,23296
  pabot/workerwrapper.py,sha256=BdELUVDs5BmEkdNBcYTlnP22Cj0tUpZEunYQMAKyKWU,185
- pabot/writer.py,sha256=tRlPI1jH9NWIYy-VkDsb2odDxZwem7ZgccRBOXZvy4w,8861
+ pabot/writer.py,sha256=91HrYV8fLOYYxGycTnO4h18EsCppLtwKTn2R4mfxKMk,9391
  pabot/py3/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  pabot/py3/client.py,sha256=Od9L4vZ0sozMHq_W_ITQHBBt8kAej40DG58wnxmbHGM,1434
  pabot/py3/coordinator.py,sha256=kBshCzA_1QX_f0WNk42QBJyDYSwSlNM-UEBxOReOj6E,2313
  pabot/py3/messages.py,sha256=7mFr4_0x1JHm5sW8TvKq28Xs_JoeIGku2bX7AyO0kng,2557
  pabot/py3/worker.py,sha256=5rfp4ZiW6gf8GRz6eC0-KUkfx847A91lVtRYpLAv2sg,1612
- robotframework_pabot-5.2.0rc1.dist-info/METADATA,sha256=qQ3V9OzAxIwAZGpUtL9Gi2cetHLXDsTzCPR2Wt1huVw,24792
- robotframework_pabot-5.2.0rc1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
- robotframework_pabot-5.2.0rc1.dist-info/entry_points.txt,sha256=JpAIFADTeFOQWdwmn56KpAil8V3-41ZC5ICXCYm3Ng0,43
- robotframework_pabot-5.2.0rc1.dist-info/top_level.txt,sha256=t3OwfEAsSxyxrhjy_GCJYHKbV_X6AIsgeLhYeHvObG4,6
- robotframework_pabot-5.2.0rc1.dist-info/RECORD,,
+ robotframework_pabot-5.2.0rc2.dist-info/METADATA,sha256=81x6M9TGs27yKenKnTkaloq8nUQS52kzVQF9SNvq_0Y,24791
+ robotframework_pabot-5.2.0rc2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ robotframework_pabot-5.2.0rc2.dist-info/entry_points.txt,sha256=JpAIFADTeFOQWdwmn56KpAil8V3-41ZC5ICXCYm3Ng0,43
+ robotframework_pabot-5.2.0rc2.dist-info/top_level.txt,sha256=t3OwfEAsSxyxrhjy_GCJYHKbV_X6AIsgeLhYeHvObG4,6
+ robotframework_pabot-5.2.0rc2.dist-info/RECORD,,