robotframework-pabot 4.3.2__py3-none-any.whl → 5.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pabot/__init__.py +1 -1
- pabot/arguments.py +13 -1
- pabot/execution_items.py +67 -31
- pabot/pabot.py +247 -107
- pabot/pabotlib.py +1 -1
- pabot/result_merger.py +19 -5
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/METADATA +145 -27
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/RECORD +12 -12
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/WHEEL +0 -0
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/entry_points.txt +0 -0
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/licenses/LICENSE.txt +0 -0
- {robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/top_level.txt +0 -0
pabot/__init__.py
CHANGED
pabot/arguments.py
CHANGED
@@ -142,6 +142,14 @@ def _parse_shard(arg):
     return int(parts[0]), int(parts[1])


+def _parse_artifacts(arg):
+    # type: (str) -> Tuple[List[str], bool]
+    artifacts = arg.split(',')
+    if artifacts[-1] == 'notimestamps':
+        return (artifacts[:-1], False)
+    return (artifacts, True)
+
+
 def _parse_pabot_args(args):  # type: (List[str]) -> Tuple[List[str], Dict[str, object]]
     pabot_args = {
         "command": ["pybot" if ROBOT_VERSION < "3.1" else "robot"],
@@ -155,6 +163,7 @@ def _parse_pabot_args(args):  # type: (List[str]) -> Tuple[List[str], Dict[str,
         "processes": _processes_count(),
         "processtimeout": None,
         "artifacts": ["png"],
+        "artifactstimestamps": True,
         "artifactsinsubfolders": False,
         "shardindex": 0,
         "shardcount": 1,
@@ -181,7 +190,7 @@ def _parse_pabot_args(args):  # type: (List[str]) -> Tuple[List[str], Dict[str,
         "processtimeout": int,
         "ordering": str,
         "suitesfrom": str,
-        "artifacts":
+        "artifacts": _parse_artifacts,
         "shard": _parse_shard,
     }

@@ -239,6 +248,9 @@ def _parse_pabot_args(args):  # type: (List[str]) -> Tuple[List[str], Dict[str,
         elif arg_name == "pabotlibhost":
             pabot_args["pabotlib"] = False
             pabot_args[arg_name] = value
+        elif arg_name == "artifacts":
+            pabot_args["artifacts"] = value[0]
+            pabot_args["artifactstimestamps"] = value[1]
         else:
             pabot_args[arg_name] = value
         i += 2
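For illustration, the new `--artifacts` handling can be exercised on its own. The following is a minimal sketch (not part of the package) that mirrors the `_parse_artifacts` behaviour shown above: a trailing `notimestamps` token is treated as a flag rather than a file extension.

```python
from typing import List, Tuple

def parse_artifacts(arg: str) -> Tuple[List[str], bool]:
    # Mirror of pabot's _parse_artifacts: split the comma-separated value and
    # interpret a trailing 'notimestamps' token as "disable timestamping".
    artifacts = arg.split(',')
    if artifacts[-1] == 'notimestamps':
        return artifacts[:-1], False
    return artifacts, True

assert parse_artifacts("png,mp4,notimestamps") == (["png", "mp4"], False)
assert parse_artifacts("png") == (["png"], True)
```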
pabot/execution_items.py
CHANGED

@@ -1,5 +1,5 @@
 from functools import total_ordering
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Dict, List, Optional, Tuple, Union, Set

 from robot import __version__ as ROBOT_VERSION
 from robot.errors import DataError
@@ -8,36 +8,68 @@ from robot.utils import PY2, is_unicode
 import re


-def create_dependency_tree(items):
+def create_dependency_tree(items):
     # type: (List[ExecutionItem]) -> List[List[ExecutionItem]]
-
-
-
-
-    while
-
-
-
-
-
-
-
-            # All #DEPENDS test are already run:
-            if len(stage_indexes) == len(d.depends):
-                run_in_this_stage.append(d)
+    dependency_tree = []  # type: List[List[ExecutionItem]]
+    scheduled = set()  # type: Set[str]
+    name_to_item = {item.name: item for item in items}  # type: Dict[str, ExecutionItem]
+
+    while items:
+        stage = []  #type: List[ExecutionItem]
+        stage_names = set()  # type: Set[str]
+
+        for item in items:
+            if all(dep in scheduled for dep in item.depends):
+                stage.append(item)
+                stage_names.add(item.name)
             else:
-
-
-            if
-
-
-
-
-
-
-
-
+                break  # Preserve input order
+
+        if not stage:
+            # Try to find any schedulable item even if it's out of order
+            for item in items:
+                if all(dep in scheduled for dep in item.depends):
+                    stage = [item]
+                    stage_names = {item.name}
+                    break
+
+        if not stage:
+            # Prepare a detailed error message
+            unscheduled_items = [item.name for item in items]
+            unsatisfied_deps = {
+                item.name: [d for d in item.depends if d not in scheduled and d not in name_to_item]
+                for item in items
+            }
+            potential_cycles = {
+                item.name: [d for d in item.depends if d in unscheduled_items]
+                for item in items if item.depends
+            }
+
+            message = ["Invalid test configuration:"]
+
+            message_unsatisfied = []
+            for item, deps in unsatisfied_deps.items():
+                if deps:
+                    message_unsatisfied.append(f" - {item} depends on missing: {', '.join(deps)}")
+            if message_unsatisfied:
+                message.append(" Unsatisfied dependencies:")
+                message.extend(message_unsatisfied)
+                message.append(" For these tests, check that there is not #WAIT between them and that they are not inside different groups { }")
+
+            message_cycles = []
+            for item, deps in potential_cycles.items():
+                if deps:
+                    message_cycles.append(f" - {item} <-> {', '.join(deps)}")
+            if message_cycles:
+                message.append(" Possible circular dependencies:")
+                message.extend(message_cycles)
+
+            raise DataError("\n".join(message))
+
+        dependency_tree.append(stage)
+        scheduled.update(stage_names)
+        items = [item for item in items if item.name not in stage_names]
+
     return dependency_tree


@@ -47,6 +79,7 @@ class ExecutionItem(object):
     type = None  # type: str
     name = None  # type: str
     sleep = 0  # type: int
+    depends = []  # type: List[str]  # Note that depends is used by RunnableItems.

     def top_name(self):
         # type: () -> str
@@ -156,7 +189,6 @@ class GroupItem(ExecutionItem):
 class RunnableItem(ExecutionItem):
     pass

-    depends = None  # type: List[str]
     depends_keyword = "#DEPENDS"

     def _split_dependencies(self, line_name, depends_indexes):
@@ -182,7 +214,7 @@ class RunnableItem(ExecutionItem):
         self.depends = (
             self._split_dependencies(line_name, depends_indexes)
             if len(depends_indexes) != 0
-            else
+            else []
         )

     def line(self):
@@ -243,6 +275,10 @@ class SuiteItem(RunnableItem):
         # TODO Make this happen
         return []

+    def modify_options_for_executor(self, options):
+        if not(options.get("runemptysuite") and options.get("suite")):
+            options[self.type] = self.name
+

 class TestItem(RunnableItem):
     type = "test"
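To make the reworked dependency scheduling above easier to follow, here is a simplified, illustrative sketch (not the package code): items keep their given order, and an item is postponed until every `#DEPENDS` target has been scheduled in an earlier stage. Unlike the real `create_dependency_tree`, this sketch gathers every ready item per pass instead of stopping at the first unsatisfied one, and it uses a hypothetical stand-in `Item` class.

```python
from dataclasses import dataclass, field
from typing import List

@dataclass
class Item:
    # Stand-in with only the attributes the scheduler relies on.
    name: str
    depends: List[str] = field(default_factory=list)

def stage(items: List[Item]) -> List[List[str]]:
    # Postpone an item until all of its dependencies have been scheduled.
    scheduled, tree = set(), []
    while items:
        ready = [i for i in items if all(d in scheduled for d in i.depends)]
        if not ready:
            raise ValueError("unsatisfied or circular dependencies")
        tree.append([i.name for i in ready])
        scheduled.update(i.name for i in ready)
        items = [i for i in items if i.name not in scheduled]
    return tree

# 'Login' runs first; 'Checkout' waits until both of its dependencies are done.
print(stage([Item("Login"), Item("Cart", ["Login"]), Item("Checkout", ["Login", "Cart"])]))
# [['Login'], ['Cart'], ['Checkout']]
```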
pabot/pabot.py
CHANGED
@@ -109,10 +109,6 @@ EXECUTION_POOL_ID_LOCK = threading.Lock()
 POPEN_LOCK = threading.Lock()
 _PABOTLIBURI = "127.0.0.1:8270"
 _PABOTLIBPROCESS = None  # type: Optional[subprocess.Popen]
-_BOURNELIKE_SHELL_BAD_CHARS_WITHOUT_DQUOTE = (
-    "!#$^&*?[(){}<>~;'`\\|= \t\n"  # does not contain '"'
-)
-_BAD_CHARS_SET = set(_BOURNELIKE_SHELL_BAD_CHARS_WITHOUT_DQUOTE)
 _NUMBER_OF_ITEMS_TO_BE_EXECUTED = 0
 _ABNORMAL_EXIT_HAPPENED = False

@@ -198,8 +194,13 @@ def extract_section(lines, start_marker="<!-- START DOCSTRING -->", end_marker="
         if end_marker in line:
             break
         if inside_section:
-            # Remove Markdown
-
+            # Remove Markdown hyperlinks but keep text
+            line = re.sub(r'\[([^\]]+)\]\(https?://[^\)]+\)', r'\1', line)
+            # Remove Markdown section links but keep text
+            line = re.sub(r'\[([^\]]+)\]\(#[^\)]+\)', r'\1', line)
+            # Remove ** and backticks `
+            line = re.sub(r'(\*\*|`)', '', line)
+            extracted_lines.append(line)

     return "".join(extracted_lines).strip()

@@ -213,16 +214,6 @@ class Color:
     YELLOW = "\033[93m"


-def _mapOptionalQuote(command_args):
-    # type: (List[str]) -> List[str]
-    if os.name == "posix":
-        return [quote(arg) for arg in command_args]
-    return [
-        arg if set(arg).isdisjoint(_BAD_CHARS_SET) else '"%s"' % arg
-        for arg in command_args
-    ]
-
-
 def execute_and_wait_with(item):
     # type: ('QueueItem') -> None
     global CTRL_C_PRESSED, _NUMBER_OF_ITEMS_TO_BE_EXECUTED
@@ -240,13 +231,13 @@ def execute_and_wait_with(item):
         name = item.display_name
         outs_dir = os.path.join(item.outs_dir, item.argfile_index, str(item.index))
         os.makedirs(outs_dir)
-
+        run_cmd, run_options = _create_command_for_execution(
             caller_id, datasources, is_last, item, outs_dir
         )
         if item.hive:
             _hived_execute(
                 item.hive,
-
+                run_cmd + run_options,
                 outs_dir,
                 name,
                 item.verbose,
@@ -256,7 +247,8 @@
             )
         else:
             _try_execute_and_wait(
-
+                run_cmd,
+                run_options,
                 outs_dir,
                 name,
                 item.verbose,
@@ -268,7 +260,7 @@
                 sleep_before_start=item.sleep_before_start
             )
         outputxml_preprocessing(
-            item.options, outs_dir, name, item.verbose, _make_id(), caller_id
+            item.options, outs_dir, name, item.verbose, _make_id(), caller_id, item.index
         )
     except:
         _write(traceback.format_exc())
@@ -278,9 +270,8 @@ def _create_command_for_execution(caller_id, datasources, is_last, item, outs_di
     options = item.options.copy()
     if item.command == ["robot"] and not options["listener"]:
         options["listener"] = ["RobotStackTracer"]
-
-
-        + _options_for_custom_executor(
+    run_options = (
+        _options_for_custom_executor(
             options,
             outs_dir,
             item.execution_item,
@@ -291,12 +282,9 @@
             item.last_level,
             item.processes,
         )
-
-        # correct handling of the escape character later on.
-        + [os.path.normpath(s) for s in datasources]
+        + datasources
     )
-    return
-
+    return item.command, run_options

 def _pabotlib_in_use():
     return _PABOTLIBPROCESS or _PABOTLIBURI != "127.0.0.1:8270"
@@ -317,7 +305,8 @@ def _hived_execute(


 def _try_execute_and_wait(
-
+    run_cmd,
+    run_options,
     outs_dir,
     item_name,
     verbose,
@@ -328,16 +317,17 @@
     process_timeout=None,
     sleep_before_start=0
 ):
-    # type: (List[str], str, str, bool, int, str, int, bool, Optional[int], int) -> None
+    # type: (List[str], List[str], str, str, bool, int, str, int, bool, Optional[int], int) -> None
     plib = None
     is_ignored = False
     if _pabotlib_in_use():
         plib = Remote(_PABOTLIBURI)
     try:
-        with open(os.path.join(outs_dir,
-            with open(os.path.join(outs_dir,
+        with open(os.path.join(outs_dir, run_cmd[-1] + "_stdout.out"), "w") as stdout:
+            with open(os.path.join(outs_dir, run_cmd[-1] + "_stderr.out"), "w") as stderr:
                 process, (rc, elapsed) = _run(
-
+                    run_cmd,
+                    run_options,
                     stderr,
                     stdout,
                     item_name,
@@ -369,7 +359,7 @@
             show_stdout_on_failure,
         )
     if is_ignored and os.path.isdir(outs_dir):
-
+        _rmtree_with_path(outs_dir)


 def _result_to_stdout(
@@ -418,8 +408,8 @@ def _is_ignored(plib, caller_id):  # type: (Remote, str) -> bool

 # optionally invoke rebot for output.xml preprocessing to get --RemoveKeywords
 # and --flattenkeywords applied => result: much smaller output.xml files + faster merging + avoid MemoryErrors
-def outputxml_preprocessing(options, outs_dir, item_name, verbose, pool_id, caller_id):
-    # type: (Dict[str, Any], str, str, bool, int, str) -> None
+def outputxml_preprocessing(options, outs_dir, item_name, verbose, pool_id, caller_id, item_id):
+    # type: (Dict[str, Any], str, str, bool, int, str, int) -> None
     try:
         remove_keywords = options["removekeywords"]
         flatten_keywords = options["flattenkeywords"]
@@ -432,11 +422,15 @@ def outputxml_preprocessing(options, outs_dir, item_name, verbose, pool_id, call
             remove_keywords_args += ["--removekeywords", k]
         for k in flatten_keywords:
             flatten_keywords_args += ["--flattenkeywords", k]
-
+        output_name = options.get("output", "output.xml")
+        outputxmlfile = os.path.join(outs_dir, output_name)
+        if not os.path.isfile(outputxmlfile):
+            raise DataError(f"Preprosessing cannot be done because file {outputxmlfile} not exists.")
         oldsize = os.path.getsize(outputxmlfile)
-
+        process_empty = ["--processemptysuite"] if options.get("runemptysuite") else []
+        run_cmd = ["rebot"]
+        run_options = (
             [
-                "rebot",
                 "--log",
                 "NONE",
                 "--report",
@@ -447,18 +441,20 @@
                 "off",
                 "--NoStatusRC",
             ]
+            + process_empty
             + remove_keywords_args
             + flatten_keywords_args
             + ["--output", outputxmlfile, outputxmlfile]
         )
-        cmd = _mapOptionalQuote(cmd)
         _try_execute_and_wait(
-
+            run_cmd,
+            run_options,
             outs_dir,
-            "preprocessing
+            f"preprocessing {output_name} on " + item_name,
             verbose,
             pool_id,
             caller_id,
+            item_id,
         )
         newsize = os.path.getsize(outputxmlfile)
         perc = 100 * newsize / oldsize
@@ -519,8 +515,37 @@ def _increase_completed(plib, my_index):
     )


+def _write_internal_argument_file(cmd_args, filename):
+    # type: (List[str], str) -> None
+    """
+    Writes a list of command-line arguments to a file.
+    If an argument starts with '-' or '--', its value (the next item) is written on the same line.
+
+    Example:
+        ['--name', 'value', '--flag', '--other', 'x']
+    becomes:
+        --name value
+        --flag
+        --other x
+
+    :param cmd_args: List of argument strings to write
+    :param filename: Target filename
+    """
+    with open(filename, "w", encoding="utf-8") as f:
+        i = 0
+        while i < len(cmd_args):
+            current = cmd_args[i]
+            if current.startswith("-") and i + 1 < len(cmd_args) and not cmd_args[i + 1].startswith("-"):
+                f.write(f"{current} {cmd_args[i + 1]}\n")
+                i += 2
+            else:
+                f.write(f"{current}\n")
+                i += 1
+
+
 def _run(
-
+    run_command,
+    run_options,
     stderr,
     stdout,
     item_name,
@@ -531,7 +556,7 @@
     process_timeout,
     sleep_before_start,
 ):
-    # type: (List[str], IO[Any], IO[Any], str, bool, int, int, str, Optional[int], int) -> Tuple[Union[subprocess.Popen[bytes], subprocess.Popen], Tuple[int, float]]
+    # type: (List[str], List[str], IO[Any], IO[Any], str, bool, int, int, str, Optional[int], int) -> Tuple[Union[subprocess.Popen[bytes], subprocess.Popen], Tuple[int, float]]
     timestamp = datetime.datetime.now()
     if sleep_before_start > 0:
         _write(
@@ -540,7 +565,10 @@
         )
         time.sleep(sleep_before_start)
         timestamp = datetime.datetime.now()
-
+    command_name = run_command[-1].replace(" ", "_")
+    argfile_path = os.path.join(outs_dir, f"{command_name}_argfile.txt")
+    _write_internal_argument_file(run_options, filename=argfile_path)
+    cmd = ' '.join(run_command + ['-A'] + [argfile_path])
     if PY2:
         cmd = cmd.decode("utf-8").encode(SYSTEM_ENCODING)
     # avoid hitting https://bugs.python.org/issue10394
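As a brief illustration of the new flow above, each subprocess now receives its Robot Framework options through an internal argument file instead of shell quoting. The sketch below (not package code; the paths and option values are made up) mirrors the pairing rule of `_write_internal_argument_file` and shows the kind of command `_run` composes with `-A`.

```python
import os

def write_argfile(cmd_args, filename):
    # Same pairing rule as _write_internal_argument_file: an option followed by
    # a value goes on one line, bare flags and positional arguments on their own line.
    with open(filename, "w", encoding="utf-8") as f:
        i = 0
        while i < len(cmd_args):
            if cmd_args[i].startswith("-") and i + 1 < len(cmd_args) and not cmd_args[i + 1].startswith("-"):
                f.write(f"{cmd_args[i]} {cmd_args[i + 1]}\n")
                i += 2
            else:
                f.write(f"{cmd_args[i]}\n")
                i += 1

outs_dir = "pabot_results_demo"  # illustrative path
os.makedirs(outs_dir, exist_ok=True)
argfile = os.path.join(outs_dir, "robot_argfile.txt")
write_argfile(["--outputdir", outs_dir, "--suite", "Suite One", "tests"], argfile)
print(" ".join(["robot", "-A", argfile]))  # roughly the command handed to the subprocess
```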
@@ -701,14 +729,14 @@ def _options_for_executor(
     if pabotLastLevel not in options["variable"]:
         options["variable"].append(pabotLastLevel)
     if argfile:
-        _modify_options_for_argfile_use(argfile, options
+        _modify_options_for_argfile_use(argfile, options)
         options["argumentfile"] = argfile
     if options.get("test", False) and options.get("include", []):
         del options["include"]
     return _set_terminal_coloring_options(options)


-def _modify_options_for_argfile_use(argfile, options
+def _modify_options_for_argfile_use(argfile, options):
     argfile_opts, _ = ArgumentParser(
         USAGE,
         **_filter_argument_parser_options(
@@ -717,21 +745,23 @@ def _modify_options_for_argfile_use(argfile, options, root_name):
             env_options="ROBOT_OPTIONS",
         ),
     ).parse_args(["--argumentfile", argfile])
-    old_name = options.get("name", root_name)
     if argfile_opts["name"]:
         new_name = argfile_opts["name"]
-        _replace_base_name(new_name,
+        _replace_base_name(new_name, options, "suite")
         if not options["suite"]:
-            _replace_base_name(new_name,
+            _replace_base_name(new_name, options, "test")
         if "name" in options:
             del options["name"]


-def _replace_base_name(new_name,
-    if isinstance(options.get(key
-        options[key] = new_name
+def _replace_base_name(new_name, options, key):
+    if isinstance(options.get(key), str):
+        options[key] = f"{new_name}.{options[key].split('.', 1)[1]}" if '.' in options[key] else new_name
     elif key in options:
-        options[key] = [
+        options[key] = [
+            f"{new_name}.{s.split('.', 1)[1]}" if '.' in s else new_name
+            for s in options.get(key, [])
+        ]


 def _set_terminal_coloring_options(options):
@@ -985,7 +1015,9 @@ def _levelsplit(
     tests = []  # type: List[ExecutionItem]
     for s in suites:
         tests.extend(s.tests)
-
+    # If there are no tests, it may be that --runemptysuite option is used, so fallback suites
+    if tests:
+        return tests
     return list(suites)


@@ -1449,20 +1481,42 @@ def _output_dir(options, cleanup=True):
     outputdir = options.get("outputdir", ".")
     outpath = os.path.join(outputdir, "pabot_results")
     if cleanup and os.path.isdir(outpath):
-
+        _rmtree_with_path(outpath)
     return outpath


-def
+def _rmtree_with_path(path):
+    """
+    Remove a directory tree and, if a PermissionError occurs,
+    re-raise it with the absolute path included in the message.
+    """
+    try:
+        shutil.rmtree(path)
+    except PermissionError as e:
+        abs_path = os.path.abspath(path)
+        raise PermissionError(f"Failed to delete path {abs_path}") from e
+
+
+def _get_timestamp_id(timestamp_str, add_timestamp):
+    # type: (str, bool) -> Optional[str]
+    if add_timestamp:
+        return str(datetime.datetime.strptime(timestamp_str, "%Y-%m-%d %H:%M:%S.%f").strftime("%Y%m%d_%H%M%S"))
+    return None
+
+
+def _copy_output_artifacts(options, timestamp_id=None, file_extensions=None, include_subfolders=False, index=None):
     file_extensions = file_extensions or ["png"]
     pabot_outputdir = _output_dir(options, cleanup=False)
     outputdir = options.get("outputdir", ".")
     copied_artifacts = []
-
+    one_run_outputdir = pabot_outputdir
+    if index:  # For argumentfileN option:
+        one_run_outputdir = os.path.join(pabot_outputdir, index)
+    for location, _, file_names in os.walk(one_run_outputdir):
         for file_name in file_names:
             file_ext = file_name.split(".")[-1]
             if file_ext in file_extensions:
-                rel_path = os.path.relpath(location,
+                rel_path = os.path.relpath(location, one_run_outputdir)
                 prefix = rel_path.split(os.sep)[0]  # folders named "process-id"
                 dst_folder_path = outputdir
                 # if it is a file from sub-folders of "location"
@@ -1474,7 +1528,9 @@ def _copy_output_artifacts(options, file_extensions=None, include_subfolders=Fal
                     dst_folder_path = os.path.join(outputdir, subfolder_path)
                 if not os.path.isdir(dst_folder_path):
                     os.makedirs(dst_folder_path)
-
+                dst_file_name_parts = [timestamp_id, index, prefix, file_name]
+                filtered_name = [str(p) for p in dst_file_name_parts if p is not None]
+                dst_file_name = "-".join(filtered_name)
                 shutil.copy2(
                     os.path.join(location, file_name),
                     os.path.join(dst_folder_path, dst_file_name),
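To make the destination naming above concrete, the following sketch (not package code; the timestamp and index values are invented) shows how the copied artifact name is composed from the optional timestamp id, argument-file index, process folder prefix, and original file name.

```python
def artifact_name(timestamp_id, index, prefix, file_name):
    # Same composition as in _copy_output_artifacts: parts that are None are
    # dropped (no timestamp with 'notimestamps', no index without --argumentfileN).
    return "-".join(str(p) for p in [timestamp_id, index, prefix, file_name] if p is not None)

print(artifact_name("20250101_093000", "1", "0", "screenshot.png"))
# 20250101_093000-1-0-screenshot.png
print(artifact_name(None, None, "3", "screenshot.png"))
# 3-screenshot.png
```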
@@ -1483,20 +1539,22 @@
     return copied_artifacts


-def _check_pabot_results_for_missing_xml(base_dir):
+def _check_pabot_results_for_missing_xml(base_dir, command_name, output_xml_name):
     missing = []
     for root, dirs, _ in os.walk(base_dir):
         if root == base_dir:
             for subdir in dirs:
                 subdir_path = os.path.join(base_dir, subdir)
-                has_xml = any(fname.endswith(
+                has_xml = any(fname.endswith(output_xml_name) for fname in os.listdir(subdir_path))
                 if not has_xml:
-
+                    command_name = command_name.replace(" ", "_")
+                    missing.append(os.path.join(subdir_path, f'{command_name}_stderr.out'))
             break
     return missing


 def _report_results(outs_dir, pabot_args, options, start_time_string, tests_root_name):
+    output_xml_name = options.get("output") or "output.xml"
     if "pythonpath" in options:
         del options["pythonpath"]
     if ROBOT_VERSION < "4.0":
@@ -1516,7 +1574,7 @@ def _report_results(outs_dir, pabot_args, options, start_time_string, tests_root
         outputs = []  # type: List[str]
         for index, _ in pabot_args["argumentfiles"]:
             copied_artifacts = _copy_output_artifacts(
-                options, pabot_args["artifacts"], pabot_args["artifactsinsubfolders"]
+                options, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]), pabot_args["artifacts"], pabot_args["artifactsinsubfolders"], index
             )
             outputs += [
                 _merge_one_run(
@@ -1525,10 +1583,11 @@
                     tests_root_name,
                     stats,
                     copied_artifacts,
+                    timestamp_id=_get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]),
                     outputfile=os.path.join("pabot_results", "output%s.xml" % index),
                 )
             ]
-            missing_outputs.extend(_check_pabot_results_for_missing_xml(os.path.join(outs_dir, index)))
+            missing_outputs.extend(_check_pabot_results_for_missing_xml(os.path.join(outs_dir, index), pabot_args.get('command')[-1], output_xml_name))
         if "output" not in options:
             options["output"] = "output.xml"
         _write_stats(stats)
@@ -1537,7 +1596,7 @@
         exit_code = _report_results_for_one_run(
             outs_dir, pabot_args, options, start_time_string, tests_root_name, stats
         )
-        missing_outputs.extend(_check_pabot_results_for_missing_xml(outs_dir))
+        missing_outputs.extend(_check_pabot_results_for_missing_xml(outs_dir, pabot_args.get('command')[-1], output_xml_name))
     if missing_outputs:
         _write(("[ " + _wrap_with(Color.YELLOW, 'WARNING') + " ] "
                "One or more subprocesses encountered an error and the "
@@ -1574,11 +1633,12 @@ def _write_stats(stats):
 def _report_results_for_one_run(
     outs_dir, pabot_args, options, start_time_string, tests_root_name, stats
 ):
+    _write(pabot_args)
     copied_artifacts = _copy_output_artifacts(
-        options, pabot_args["artifacts"], pabot_args["artifactsinsubfolders"]
+        options, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"]), pabot_args["artifacts"], pabot_args["artifactsinsubfolders"]
     )
     output_path = _merge_one_run(
-        outs_dir, options, tests_root_name, stats, copied_artifacts
+        outs_dir, options, tests_root_name, stats, copied_artifacts, _get_timestamp_id(start_time_string, pabot_args["artifactstimestamps"])
     )
     _write_stats(stats)
     if (
@@ -1597,13 +1657,14 @@ def _report_results_for_one_run(


 def _merge_one_run(
-    outs_dir, options, tests_root_name, stats, copied_artifacts, outputfile=None
+    outs_dir, options, tests_root_name, stats, copied_artifacts, timestamp_id, outputfile=None
 ):
     outputfile = outputfile or options.get("output", "output.xml")
     output_path = os.path.abspath(
         os.path.join(options.get("outputdir", "."), outputfile)
     )
-
+    filename = options.get("output") or "output.xml"
+    files = natsorted(glob(os.path.join(_glob_escape(outs_dir), f"**/*{filename}"), recursive=True))
     if not files:
         _write('WARN: No output files in "%s"' % outs_dir, Color.YELLOW)
         return ""
@@ -1615,7 +1676,7 @@ def _merge_one_run(
     if PY2:
         files = [f.decode(SYSTEM_ENCODING) if not is_unicode(f) else f for f in files]
     resu = merge(
-        files, options, tests_root_name, copied_artifacts, invalid_xml_callback
+        files, options, tests_root_name, copied_artifacts, timestamp_id, invalid_xml_callback
     )
     _update_stats(resu, stats)
     if ROBOT_VERSION >= "7.0" and options.get("legacyoutput"):
@@ -1689,9 +1750,19 @@ def _stop_message_writer():
     MESSAGE_QUEUE.join()


-def
-    if
-
+def _is_port_available(port):
+    """Check if a given port on localhost is available."""
+    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
+        try:
+            s.bind(("localhost", port))
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            return True
+        except OSError:
+            return False
+
+
+def _get_free_port():
+    """Return a free TCP port on localhost."""
     with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
         s.bind(("localhost", 0))
         s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -1700,29 +1771,43 @@ def _get_free_port(pabot_args):

 def _start_remote_library(pabot_args):  # type: (dict) -> Optional[subprocess.Popen]
     global _PABOTLIBURI
-
-
-    if not pabot_args["pabotlib"]:
+    # If pabotlib is not enabled, do nothing
+    if not pabot_args.get("pabotlib"):
         return None
-
-
-    )
+
+    host = pabot_args.get("pabotlibhost", "127.0.0.1")
+    port = pabot_args.get("pabotlibport", 8270)
+
+    # If host is default and user specified a non-zero port, check if it's available
+    if host == "127.0.0.1" and port != 0 and not _is_port_available(port):
+        _write(
+            f"Warning: specified pabotlibport {port} is already in use. "
+            "A free port will be assigned automatically.",
+            Color.YELLOW,
+        )
+        port = _get_free_port()
+
+    # If host is default and port = 0, assign a free port
+    if host == "127.0.0.1" and port == 0:
+        port = _get_free_port()
+
+    _PABOTLIBURI = f"{host}:{port}"
+    resourcefile = pabot_args.get("resourcefile") or ""
+    if resourcefile and not os.path.exists(resourcefile):
         _write(
             "Warning: specified resource file doesn't exist."
             " Some tests may fail or continue forever.",
             Color.YELLOW,
         )
-
-
-
-
-
-
-
-
-
-            shell=True,
-        )
+        resourcefile = ""
+    cmd = [
+        sys.executable,
+        "-m", pabotlib.__name__,
+        resourcefile,
+        pabot_args["pabotlibhost"],
+        str(port),
+    ]
+    return subprocess.Popen(cmd)


 def _stop_remote_library(process):  # type: (subprocess.Popen) -> None
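A small, illustrative sketch of the port-selection policy introduced above (not the package code): only the default host is adjusted, and a port of 0 or an occupied port falls back to a free one picked by the OS.

```python
import socket
from contextlib import closing

def is_port_available(port: int) -> bool:
    # Same idea as _is_port_available: try to bind the port on localhost.
    with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
        try:
            s.bind(("localhost", port))
            return True
        except OSError:
            return False

def choose_port(host: str, wanted: int) -> int:
    # Mirrors the decision in the new _start_remote_library: a non-default host
    # is left untouched; port 0 or a busy port is replaced with a free one.
    if host != "127.0.0.1":
        return wanted
    if wanted == 0 or not is_port_available(wanted):
        with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
            s.bind(("localhost", 0))
            return s.getsockname()[1]
    return wanted

print(choose_port("127.0.0.1", 0))         # some free port on localhost
print(choose_port("192.168.1.123", 8271))  # remote host: kept as-is
```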
@@ -2122,10 +2207,11 @@ def _parse_ordering(filename):  # type: (str) -> List[ExecutionItem]
         raise DataError("Error parsing ordering file '%s'" % filename)


-# TODO: After issue #646, it seems necessary to thoroughly rethink how this functionality should work.
 def _check_ordering(ordering_file, suite_names):  # type: (List[ExecutionItem], List[ExecutionItem]) -> None
     list_of_suite_names = [s.name for s in suite_names]
     skipped_runnable_items = []
+    suite_and_test_names = []
+    duplicates = []
     if ordering_file:
         for item in ordering_file:
             if item.type in ['suite', 'test']:
@@ -2135,10 +2221,17 @@ def _check_ordering(ordering_file, suite_names):  # type: (List[ExecutionItem],
                 # the --suite option, and the given name is part of the full name of any test or suite.
                 if item.name != ' Invalid' and not (item.type == 'suite' and any((s == item.name or s.startswith(item.name + ".")) for s in list_of_suite_names)):
                     skipped_runnable_items.append(f"{item.type.title()} item: '{item.name}'")
+                if item.name in suite_and_test_names:
+                    duplicates.append(f"{item.type.title()} item: '{item.name}'")
+                suite_and_test_names.append(item.name)
     if skipped_runnable_items:
         _write("Note: The ordering file contains test or suite items that are not included in the current test run. The following items will be ignored/skipped:")
         for item in skipped_runnable_items:
             _write(f" - {item}")
+    if duplicates:
+        _write("Note: The ordering file contains duplicate suite or test items. Only the first occurrence is taken into account. These are duplicates:")
+        for item in duplicates:
+            _write(f" - {item}")


 def _group_suites(outs_dir, datasources, options, pabot_args):
@@ -2147,8 +2240,15 @@ def _group_suites(outs_dir, datasources, options, pabot_args):
     ordering_arg = _parse_ordering(pabot_args.get("ordering")) if (pabot_args.get("ordering")) is not None else None
     if ordering_arg:
         _verify_depends(ordering_arg)
-
-
+        if options.get("name"):
+            ordering_arg = _update_ordering_names(ordering_arg, options['name'])
+        _check_ordering(ordering_arg, suite_names)
+        if pabot_args.get("testlevelsplit") and ordering_arg and any(item.type == 'suite' for item in ordering_arg):
+            reduced_suite_names = _reduce_items(suite_names, ordering_arg)
+            if options.get("runemptysuite") and not reduced_suite_names:
+                return [suite_names]
+            if reduced_suite_names:
+                suite_names = reduced_suite_names
     ordering_arg_with_sleep = _set_sleep_times(ordering_arg)
     ordered_suites = _preserve_order(suite_names, ordering_arg_with_sleep)
     shard_suites = solve_shard_suites(ordered_suites, pabot_args)
@@ -2161,6 +2261,58 @@ def _group_suites(outs_dir, datasources, options, pabot_args):
     return grouped_by_depend


+def _update_ordering_names(ordering, new_top_name):
+    # type: (List[ExecutionItem], str) -> List[ExecutionItem]
+    output = []
+    for item in ordering:
+        if item.type in ['suite', 'test']:
+            splitted_name = item.name.split('.')
+            splitted_name[0] = new_top_name
+            item.name = '.'.join(splitted_name)
+        output.append(item)
+    return output
+
+
+def _reduce_items(items, selected_suites):
+    # type: (List[ExecutionItem], List[ExecutionItem]) -> List[ExecutionItem]
+    """
+    Reduce a list of test items by replacing covered test cases with suite items from selected_suites.
+    Raises DataError if:
+    - Any test is covered by more than one selected suite.
+    """
+    reduced = []
+    suite_coverage = {}
+    test_to_suite = {}
+
+    for suite in selected_suites:
+        if suite.type == 'suite':
+            suite_name = str(suite.name)
+            covered_tests = [
+                item for item in items
+                if item.type == "test" and str(item.name).startswith(suite_name + ".")
+            ]
+
+            if covered_tests:
+                for test in covered_tests:
+                    test_name = str(test.name)
+                    if test_name in test_to_suite:
+                        raise DataError(
+                            f"Invalid test configuration: Test '{test_name}' is matched by multiple suites: "
+                            f"'{test_to_suite[test_name]}' and '{suite_name}'."
+                        )
+                    test_to_suite[test_name] = suite_name
+
+                suite_coverage[suite_name] = set(str(t.name) for t in covered_tests)
+                reduced.append(suite)
+
+    # Add tests not covered by any suite
+    for item in items:
+        if item.type == "test" and str(item.name) not in test_to_suite:
+            reduced.append(item)
+
+    return reduced
+
+
 def _set_sleep_times(ordering_arg):
     # type: (List[ExecutionItem]) -> List[ExecutionItem]
     set_sleep_value = 0
@@ -2227,23 +2379,11 @@ def _verify_depends(suite_names):
     )


-def _group_by_depend(suite_names):
-    # type: (List[ExecutionItem]) -> List[List[ExecutionItem]]
-    group_items = list(filter(lambda suite: isinstance(suite, GroupItem), suite_names))
-    runnable_suites = list(
-        filter(lambda suite: isinstance(suite, RunnableItem), suite_names)
-    )
-    dependency_tree = create_dependency_tree(runnable_suites)
-    # Since groups cannot depend on others, they are placed at the beginning.
-    dependency_tree[0][0:0] = group_items
-    return dependency_tree
-
-
 def _all_grouped_suites_by_depend(grouped_suites):
     # type: (List[List[ExecutionItem]]) -> List[List[ExecutionItem]]
     grouped_by_depend = []
     for group_suite in grouped_suites:  # These groups are divided by #WAIT
-        grouped_by_depend.extend(
+        grouped_by_depend.extend(create_dependency_tree(group_suite))
     return grouped_by_depend

pabot/pabotlib.py
CHANGED
@@ -60,7 +60,7 @@ class _PabotLib(object):
         self, resourcefile
     ):  # type: (Optional[str]) -> Dict[str, Dict[str, Any]]
         vals = {}  # type: Dict[str, Dict[str, Any]]
-        if resourcefile
+        if not resourcefile:
             return vals
         conf = configparser.ConfigParser()
         conf.read(resourcefile)
pabot/result_merger.py
CHANGED

@@ -35,7 +35,7 @@ from robot.model import SuiteVisitor


 class ResultMerger(SuiteVisitor):
-    def __init__(self, result, tests_root_name, out_dir, copied_artifacts, legacy_output):
+    def __init__(self, result, tests_root_name, out_dir, copied_artifacts, timestamp_id, legacy_output):
         self.root = result.suite
         self.errors = result.errors
         self.current = None
@@ -44,6 +44,7 @@ class ResultMerger(SuiteVisitor):
         self._prefix = ""
         self._out_dir = out_dir
         self.legacy_output = legacy_output
+        self.timestamp_id = timestamp_id

         self._patterns = []
         regexp_template = (
@@ -65,7 +66,7 @@ class ResultMerger(SuiteVisitor):
             raise

     def _set_prefix(self, source):
-        self._prefix = prefix(source)
+        self._prefix = prefix(source, self.timestamp_id)

     def start_suite(self, suite):
         if self._skip_until and self._skip_until != suite:
@@ -194,9 +195,19 @@ class ResultsCombiner(CombinedResult):
         self.errors.add(other.errors)


-def prefix(source):
+def prefix(source, timestamp_id):
     try:
-
+        path_without_id, id = os.path.split(os.path.dirname(source))
+        if not id:
+            return ""
+        if os.path.split(path_without_id)[1] == 'pabot_results':
+            return "-".join([str(p) for p in [timestamp_id, id] if p is not None])
+        else:
+            # --argumentfileN in use: (there should be one subdir level more)
+            _, index = os.path.split(path_without_id)
+            if not index:
+                return ""
+            return "-".join([str(p) for p in [timestamp_id, index, id] if p is not None])
     except:
         return ""

@@ -225,6 +236,7 @@ def merge_groups(
     invalid_xml_callback,
     out_dir,
     copied_artifacts,
+    timestamp_id,
     legacy_output
 ):
     merged = []
@@ -232,7 +244,7 @@ def merge_groups(
         results, critical_tags, non_critical_tags, invalid_xml_callback
     ).values():
         base = group[0]
-        merger = ResultMerger(base, tests_root_name, out_dir, copied_artifacts, legacy_output)
+        merger = ResultMerger(base, tests_root_name, out_dir, copied_artifacts, timestamp_id, legacy_output)
         for out in group:
             merger.merge(out)
         merged.append(base)
@@ -244,6 +256,7 @@ def merge(
     rebot_options,
     tests_root_name,
     copied_artifacts,
+    timestamp_id,
     invalid_xml_callback=None,
 ):
     assert len(result_files) > 0
@@ -263,6 +276,7 @@ def merge(
         invalid_xml_callback,
         settings.output_directory,
         copied_artifacts,
+        timestamp_id,
         rebot_options.get('legacyoutput')
     )
     if len(merged) == 1:
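To illustrate the new `prefix()` behaviour above, the sketch below (not package code; paths and timestamp are invented) derives the per-run prefix from an output.xml path: the parent folder is the queue index, and one level up is either `pabot_results` or an `--argumentfileN` index folder.

```python
import os

def result_prefix(source, timestamp_id):
    # Same path walk as result_merger.prefix(), without the error handling.
    path_without_id, run_id = os.path.split(os.path.dirname(source))
    parent = os.path.split(path_without_id)[1]
    parts = [timestamp_id, run_id] if parent == "pabot_results" else [timestamp_id, parent, run_id]
    return "-".join(str(p) for p in parts if p is not None)

print(result_prefix(os.path.join("pabot_results", "3", "output.xml"), "20250101_093000"))
# 20250101_093000-3
print(result_prefix(os.path.join("pabot_results", "1", "3", "output.xml"), None))
# 1-3  (--argumentfile1 run, timestamps disabled)
```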
{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: robotframework-pabot
-Version: 4.3.2
+Version: 5.1.0
 Summary: Parallel test runner for Robot Framework
 Home-page: https://pabot.org
 Download-URL: https://pypi.python.org/pypi/robotframework-pabot
@@ -39,6 +39,22 @@ A parallel executor for [Robot Framework](http://www.robotframework.org) tests.

 [](https://youtu.be/i0RV6SJSIn8 "Pabot presentation at robocon.io 2018")

+## Table of Contents
+
+- [Installation](#installation)
+- [Basic use](#basic-use)
+- [Contact](#contact)
+- [Contributing](#contributing-to-the-project)
+- [Command-line options](#command-line-options)
+- [PabotLib](#pabotlib)
+- [Controlling execution order](#controlling-execution-order-and-level-of-parallelism)
+- [Programmatic use](#programmatic-use)
+- [Global variables](#global-variables)
+- [Output Files Generated by Pabot](#output-files-generated-by-pabot)
+- [Artifacts Handling and Parallel Execution Notes](#artifacts-handling-and-parallel-execution-notes)
+
+----
+
 ## Installation:

 From PyPi:
@@ -83,7 +99,16 @@ There are several ways you can help in improving this tool:
 - Contribute by programming and making a pull request (easiest way is to work on an issue from the issue tracker)

 ## Command-line options
+<!-- NOTE:
+The sections inside these docstring markers are also used in Pabot's --help output.
+Currently, the following transformations are applied:
+- Remove Markdown links but keep the text
+- Remove ** and backticks `
+
+If you modify this part, make sure the Markdown section still looks clean and readable in the --help output. -->
+
 <!-- START DOCSTRING -->
+```
 pabot [--verbose|--testlevelsplit|--command .. --end-command|
   --processes num|--no-pabotlib|--pabotlibhost host|--pabotlibport port|
   --processtimeout num|
@@ -95,52 +120,60 @@ pabot [--verbose|--testlevelsplit|--command .. --end-command|
   --no-rebot|
   --help|--version]
   [robot options] [path ...]
+```

 PabotLib remote server is started by default to enable locking and resource distribution between parallel test executions.

 Supports all [Robot Framework command line options](https://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#all-command-line-options) and also following pabot options:

-
+**--verbose**
 More output from the parallel execution.

-
+**--testlevelsplit**
 Split execution on test level instead of default suite level. If .pabotsuitenames contains both tests and suites then
 this will only affect new suites and split only them. Leaving this flag out when both suites and tests in
 .pabotsuitenames file will also only affect new suites and add them as suite files.

-
+**--command [ACTUAL COMMANDS TO START ROBOT EXECUTOR] --end-command**
 RF script for situations where robot is not used directly.

-
+**--processes [NUMBER OF PROCESSES]**
 How many parallel executors to use (default max of 2 and cpu count). Special option "all" will use as many processes as
 there are executable suites or tests.

-
+**--no-pabotlib**
 Disable the PabotLib remote server if you don't need locking or resource distribution features.

-
+**--pabotlibhost [HOSTNAME]**
 Connect to an already running instance of the PabotLib remote server at the given host (disables the local PabotLib
 server start). For example, to connect to a remote PabotLib server running on another machine:

     pabot --pabotlibhost 192.168.1.123 --pabotlibport 8271 tests/

-The remote server can be
+The remote server can also be started and executed separately from pabot instances:

     python -m pabot.pabotlib <path_to_resourcefile> <host> <port>
     python -m pabot.pabotlib resource.txt 192.168.1.123 8271

 This enables sharing a resource with multiple Robot Framework instances.

-
+Additional details:
+- The default value for --pabotlibhost is 127.0.0.1.
+- If you provide a hostname other than 127.0.0.1, the local PabotLib server startup is automatically disabled.
+
+**--pabotlibport [PORT]**
 Port number of the PabotLib remote server (default is 8270). See --pabotlibhost for more information.

-
+Behavior with port and host settings:
+- If you set the port value to 0 and --pabotlibhost is 127.0.0.1 (default), a free port on localhost will be assigned automatically.
+
+**--processtimeout [TIMEOUT]**
 Maximum time in seconds to wait for a process before killing it. If not set, there's no timeout.

-
+**--shard [INDEX]/[TOTAL]**
 Optionally split execution into smaller pieces. This can be used for distributing testing to multiple machines.

-
+**--artifacts [FILE EXTENSIONS]**
 List of file extensions (comma separated). Defines which files (screenshots, videos etc.) from separate reporting
 directories would be copied and included in a final report. Possible links to copied files in RF log would be updated
 (only relative paths supported). The default value is `png`.
@@ -149,49 +182,51 @@ Supports all [Robot Framework command line options](https://robotframework.org/r

     --artifacts png,mp4,txt

-
+The artifact naming conventions are described in the README.md section: [Output Files Generated by Pabot](#output-files-generated-by-pabot).
+
+**--artifactsinsubfolders**
 Copy artifacts located not only directly in the RF output dir, but also in it's sub-folders.

-
+**--resourcefile [FILEPATH]**
 Indicator for a file that can contain shared variables for distributing resources. This needs to be used together with
 pabotlib option. Resource file syntax is same as Windows ini files. Where a section is a shared set of variables.

-
+**--argumentfile[INTEGER] [FILEPATH]**
 Run same suites with multiple [argumentfile](http://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#argument-files) options.

 For example:

     --argumentfile1 arg1.txt --argumentfile2 arg2.txt

-
+**--suitesfrom [FILEPATH TO OUTPUTXML]**
 Optionally read suites from output.xml file. Failed suites will run first and longer running ones will be executed
 before shorter ones.

-
+**--ordering [FILE PATH]**
 Optionally give execution order from a file.

-
+**--chunk**
 Optionally chunk tests to PROCESSES number of robot runs. This can save time because all the suites will share the same
 setups and teardowns.

-
+**--pabotprerunmodifier [PRERUNMODIFIER MODULE OR CLASS]**
 Like Robot Framework's --prerunmodifier, but executed only once in the pabot's main process after all other
 --prerunmodifiers. But unlike the regular --prerunmodifier command, --pabotprerunmodifier is not executed again in each
 pabot subprocesses. Depending on the intended use, this may be desirable as well as more efficient. Can be used, for
 example, to modify the list of tests to be performed.

-
+**--no-rebot**
 If specified, the tests will execute as usual, but Rebot will not be called to merge the logs. This option is designed
 for scenarios where Rebot should be run later due to large log files, ensuring better memory and resource availability.
 Subprocess results are stored in the pabot_results folder.

-
+**--help**
 Print usage instructions.

-
+**--version**
 Print version information.

-Example usages
+**Example usages:**

     pabot test_directory
     pabot --exclude FOO directory_to_tests
@@ -271,11 +306,24 @@ Note: The `--ordering` file is intended only for defining the execution order of
 There different possibilities to influence the execution:

 * The order of suites can be changed.
-* If a directory (or a directory structure) should be executed sequentially, add the directory suite name to a row as a ```--suite``` option.
-
+* If a directory (or a directory structure) should be executed sequentially, add the directory suite name to a row as a ```--suite``` option. This usage is also supported when `--testlevelsplit` is enabled. As an alternative to using `--suite` options, you can also group tests into sequential batches using `{}` braces. (See below for details.) Note that if multiple `--suite` options are used, they must not reference the same test case. This means you cannot specify both parent and child suite names at the same time. For instance:
+
+  ```
+  --suite Top Suite.Sub Suite
+  --suite Top Suite
+  ```
+
+* If the base suite name is changing with robot option [```--name / -N```](https://robotframework.org/robotframework/latest/RobotFrameworkUserGuide.html#setting-the-name) you can use either the new or old full test path. For example:
+
+  ```
+  --test New Suite Name.Sub Suite.Test 1
+  OR
+  --test Old Suite Name.Sub Suite.Test 1
+  ```
+
 * You can add a line with text `#WAIT` to force executor to wait until all previous suites have been executed.
 * You can group suites and tests together to same executor process by adding line `{` before the group and `}` after. Note that `#WAIT` cannot be used inside a group.
-* You can introduce dependencies using the word `#DEPENDS` after a test declaration. This keyword can be used several times if it is necessary to refer to several different tests. Please take care that in case of circular dependencies an exception will be thrown. Note that each `#WAIT` splits suites into separate execution blocks, and it's not possible to define dependencies for suites or tests that are inside another `#WAIT` block or inside another `{}`
+* You can introduce dependencies using the word `#DEPENDS` after a test declaration. This keyword can be used several times if it is necessary to refer to several different tests. The ordering algorithm is designed to preserve the exact user-defined order as closely as possible. However, if a test's execution dependencies are not yet satisfied, the test is postponed and moved to the earliest possible stage where all its dependencies are fulfilled. Please take care that in case of circular dependencies an exception will be thrown. Note that each `#WAIT` splits suites into separate execution blocks, and it's not possible to define dependencies for suites or tests that are inside another `#WAIT` block or inside another `{}` braces.
 * Note: Within a group `{}`, neither execution order nor the `#DEPENDS` keyword currently works. This is due to limitations in Robot Framework, which is invoked within Pabot subprocesses. These limitations may be addressed in a future release of Robot Framework. For now, tests or suites within a group will be executed in the order Robot Framework discovers them — typically in alphabetical order.
 * An example could be:

@@ -369,4 +417,74 @@ Pabot will insert following global variables to Robot Framework namespace. These
 PABOTEXECUTIONPOOLID - this contains the pool id (an integer) for the current Robot Framework executor. This is helpful for example when visualizing the execution flow from your own listener.
 PABOTNUMBEROFPROCESSES - max number of concurrent processes that pabot may use in execution.
 CALLER_ID - a universally unique identifier for this execution.
-
+
+
+### Output Files Generated by Pabot
+
+Pabot generates several output files and folders during execution, both for internal use and for analysis purposes.
+
+#### Internal File: `.pabotsuitenames`
+
+Pabot creates a `.pabotsuitenames` file in the working directory. This is an internal hash file used to speed up execution in certain scenarios.
+This file can also be used as a base for the `--ordering` file as described earlier. Although technically it can be modified, it will be overwritten during the next execution.
+Therefore, it is **recommended** to maintain a separate file for the `--ordering` option if needed.
+
+#### Output Directory Structure
+
+In addition to the standard `log.html`, `report.html`, and `output.xml` files, the specified `--outputdir` will contain:
+
+- A folder named `pabot_results`, and
+- All defined artifacts (default: `.png` files)
+- Optionally, artifacts from subfolders if `--artifactsinsubfolders` is used
+
+Artifacts are **copied** into the output directory and renamed with the following structure:
+
+```
+TIMESTAMP-ARGUMENT_INDEX-PABOTQUEUEINDEX
+```
+
+If you use the special option `notimestamps` at the end of the `--artifacts` command, (For example: `--artifacts png,txt,notimestamps`) the timestamp part will be omitted, and the name will be in the format:
+
+```
+ARGUMENT_INDEX-PABOTQUEUEINDEX
+```
+
+- **TIMESTAMP** = Time of `pabot` command invocation (not the screenshot's actual timestamp), format: `YYYYmmdd_HHMMSS`
+- **ARGUMENT_INDEX** = Optional index number, only used if `--argumentfileN` options are given
+- **PABOTQUEUEINDEX** = Process queue index (see section [Global Variables](#global-variables))
+
+#### `pabot_results` Folder Structure
+
+The structure of the `pabot_results` folder is as follows:
+
+```
+pabot_results/
+├── [N]/                  # Optional: N = argument file index (if --argumentfileN is used)
+│   └── PABOTQUEUEINDEX/  # One per subprocess
+│       ├── output.xml
+│       ├── robot_argfile.txt
+│       ├── robot_stdout.out
+│       ├── robot_stderr.out
+│       └── artifacts...
+```
+
+Each `PABOTQUEUEINDEX` folder contains as default:
+
+- `robot_argfile.txt` – Arguments used in that subprocess
+- `robot_stdout.out` and `robot_stderr.out` – Stdout and stderr of the subprocess
+- `output.xml` – The partial output file to be merged later
+- Artifacts – Screenshots or other files copied from subprocess folders
+
+> **Note:** The entire `pabot_results` folder is considered temporary and will be **deleted/overwritten** on the next `pabot` run using the same `--outputdir`.
+
+
+### Artifacts Handling and Parallel Execution Notes
+
+Due to parallel execution, artifacts like screenshots should ideally be:
+
+- Embedded directly into the XML using tools like [SeleniumLibrary](https://robotframework.org/SeleniumLibrary/SeleniumLibrary.html#Set%20Screenshot%20Directory) with the `EMBED` option
+  _Example:_
+  `Library    SeleniumLibrary    screenshot_root_directory=EMBED`
+- Or saved to the subprocess’s working directory (usually default behavior), ensuring separation across processes
+
+If you manually specify a shared screenshot directory in your test code, **all processes will write to it concurrently**, which may cause issues such as overwriting or missing files if screenshots are taken simultaneously.

{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/RECORD
CHANGED

@@ -1,12 +1,12 @@
 pabot/SharedLibrary.py,sha256=mIipGs3ZhKYEakKprcbrMI4P_Un6qI8gE7086xpHaLY,2552
-pabot/__init__.py,sha256=
-pabot/arguments.py,sha256=
+pabot/__init__.py,sha256=0g7UY0dKCwXzo3sH_STKWVLBEVtZnj96gmuak8fdlf0,200
+pabot/arguments.py,sha256=M1T2QAA0v2BO1bbryLC82RIA0VZZaEGfXnQiXfNcHOU,9577
 pabot/clientwrapper.py,sha256=yz7battGs0exysnDeLDWJuzpb2Q-qSjitwxZMO2TlJw,231
 pabot/coordinatorwrapper.py,sha256=nQQ7IowD6c246y8y9nsx0HZbt8vS2XODhPVDjm-lyi0,195
-pabot/execution_items.py,sha256=
-pabot/pabot.py,sha256=
-pabot/pabotlib.py,sha256=
-pabot/result_merger.py,sha256=
+pabot/execution_items.py,sha256=zDVGW0AAeVbM-scC3Yui2TxvIPx1wYyFKHTPU2BkJkY,13329
+pabot/pabot.py,sha256=wxkCGUzvibj7Jtdqhuyzo7F5k5xOPgVXICWZXbt7cn8,81246
+pabot/pabotlib.py,sha256=vHbqV7L7mIvDzXBh9UcdULrwhBHNn70EDXF_31MNFO4,22320
+pabot/result_merger.py,sha256=g4mm-BhhMK57Z6j6dpvfL5El1g5onOtfV4RByNrO8g0,9744
 pabot/robotremoteserver.py,sha256=L3O2QRKSGSE4ux5M1ip5XJMaelqaxQWJxd9wLLdtpzM,22272
 pabot/workerwrapper.py,sha256=BdELUVDs5BmEkdNBcYTlnP22Cj0tUpZEunYQMAKyKWU,185
 pabot/py3/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -14,9 +14,9 @@ pabot/py3/client.py,sha256=Od9L4vZ0sozMHq_W_ITQHBBt8kAej40DG58wnxmbHGM,1434
 pabot/py3/coordinator.py,sha256=kBshCzA_1QX_f0WNk42QBJyDYSwSlNM-UEBxOReOj6E,2313
 pabot/py3/messages.py,sha256=7mFr4_0x1JHm5sW8TvKq28Xs_JoeIGku2bX7AyO0kng,2557
 pabot/py3/worker.py,sha256=5rfp4ZiW6gf8GRz6eC0-KUkfx847A91lVtRYpLAv2sg,1612
-robotframework_pabot-
-robotframework_pabot-
-robotframework_pabot-
-robotframework_pabot-
-robotframework_pabot-
-robotframework_pabot-
+robotframework_pabot-5.1.0.dist-info/licenses/LICENSE.txt,sha256=WNHhf_5RCaeuKWyq_K39vmp9F28LxKsB4SpomwSZ2L0,11357
+robotframework_pabot-5.1.0.dist-info/METADATA,sha256=-3nvXfoJrNCoqf6XVZccsiGhqiU8quQWI3cGk_RV0hY,22070
+robotframework_pabot-5.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+robotframework_pabot-5.1.0.dist-info/entry_points.txt,sha256=JpAIFADTeFOQWdwmn56KpAil8V3-41ZC5ICXCYm3Ng0,43
+robotframework_pabot-5.1.0.dist-info/top_level.txt,sha256=t3OwfEAsSxyxrhjy_GCJYHKbV_X6AIsgeLhYeHvObG4,6
+robotframework_pabot-5.1.0.dist-info/RECORD,,

{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/WHEEL
RENAMED
File without changes

{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/entry_points.txt
RENAMED
File without changes

{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/licenses/LICENSE.txt
RENAMED
File without changes

{robotframework_pabot-4.3.2.dist-info → robotframework_pabot-5.1.0.dist-info}/top_level.txt
RENAMED
File without changes