siliconcompiler 0.35.2__py3-none-any.whl → 0.35.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. siliconcompiler/_metadata.py +1 -1
  2. siliconcompiler/apps/sc_issue.py +18 -2
  3. siliconcompiler/apps/smake.py +106 -100
  4. siliconcompiler/checklist.py +2 -1
  5. siliconcompiler/constraints/asic_component.py +49 -11
  6. siliconcompiler/constraints/asic_floorplan.py +23 -21
  7. siliconcompiler/constraints/asic_pins.py +55 -17
  8. siliconcompiler/constraints/asic_timing.py +53 -22
  9. siliconcompiler/constraints/fpga_timing.py +5 -6
  10. siliconcompiler/data/templates/replay/replay.sh.j2 +27 -14
  11. siliconcompiler/flowgraph.py +418 -129
  12. siliconcompiler/library.py +5 -4
  13. siliconcompiler/package/__init__.py +17 -6
  14. siliconcompiler/package/https.py +10 -5
  15. siliconcompiler/project.py +92 -33
  16. siliconcompiler/remote/client.py +17 -6
  17. siliconcompiler/scheduler/docker.py +24 -25
  18. siliconcompiler/scheduler/scheduler.py +284 -121
  19. siliconcompiler/scheduler/schedulernode.py +196 -90
  20. siliconcompiler/scheduler/slurm.py +113 -29
  21. siliconcompiler/scheduler/taskscheduler.py +0 -7
  22. siliconcompiler/schema/__init__.py +3 -2
  23. siliconcompiler/schema/_metadata.py +1 -1
  24. siliconcompiler/schema/baseschema.py +205 -93
  25. siliconcompiler/schema/editableschema.py +29 -0
  26. siliconcompiler/schema/namedschema.py +21 -13
  27. siliconcompiler/schema/parametervalue.py +14 -2
  28. siliconcompiler/schema/safeschema.py +18 -7
  29. siliconcompiler/schema_support/dependencyschema.py +4 -3
  30. siliconcompiler/schema_support/option.py +82 -1
  31. siliconcompiler/schema_support/pathschema.py +14 -15
  32. siliconcompiler/schema_support/record.py +5 -4
  33. siliconcompiler/targets/asap7_demo.py +4 -1
  34. siliconcompiler/tool.py +56 -29
  35. siliconcompiler/tools/builtin/__init__.py +2 -0
  36. siliconcompiler/tools/builtin/filter.py +8 -1
  37. siliconcompiler/tools/builtin/importfiles.py +2 -0
  38. siliconcompiler/tools/klayout/__init__.py +3 -0
  39. siliconcompiler/tools/klayout/scripts/klayout_convert_drc_db.py +1 -0
  40. siliconcompiler/tools/klayout/scripts/klayout_export.py +1 -0
  41. siliconcompiler/tools/klayout/scripts/klayout_operations.py +1 -0
  42. siliconcompiler/tools/klayout/scripts/klayout_show.py +2 -1
  43. siliconcompiler/tools/klayout/scripts/klayout_utils.py +3 -4
  44. siliconcompiler/tools/klayout/show.py +17 -5
  45. siliconcompiler/tools/openroad/__init__.py +27 -1
  46. siliconcompiler/tools/openroad/_apr.py +81 -4
  47. siliconcompiler/tools/openroad/clock_tree_synthesis.py +1 -0
  48. siliconcompiler/tools/openroad/global_placement.py +1 -0
  49. siliconcompiler/tools/openroad/init_floorplan.py +116 -7
  50. siliconcompiler/tools/openroad/power_grid_analysis.py +174 -0
  51. siliconcompiler/tools/openroad/repair_design.py +1 -0
  52. siliconcompiler/tools/openroad/repair_timing.py +1 -0
  53. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +1 -1
  54. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +42 -4
  55. siliconcompiler/tools/openroad/scripts/apr/sc_irdrop.tcl +146 -0
  56. siliconcompiler/tools/openroad/scripts/apr/sc_repair_design.tcl +1 -1
  57. siliconcompiler/tools/openroad/scripts/apr/sc_write_data.tcl +4 -6
  58. siliconcompiler/tools/openroad/scripts/common/procs.tcl +1 -1
  59. siliconcompiler/tools/openroad/scripts/common/reports.tcl +1 -1
  60. siliconcompiler/tools/openroad/scripts/rcx/sc_rcx_bench.tcl +2 -4
  61. siliconcompiler/tools/opensta/__init__.py +1 -1
  62. siliconcompiler/tools/opensta/scripts/sc_timing.tcl +17 -12
  63. siliconcompiler/tools/vivado/scripts/sc_bitstream.tcl +11 -0
  64. siliconcompiler/tools/vivado/scripts/sc_place.tcl +11 -0
  65. siliconcompiler/tools/vivado/scripts/sc_route.tcl +11 -0
  66. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +10 -0
  67. siliconcompiler/tools/vpr/__init__.py +28 -0
  68. siliconcompiler/tools/yosys/prepareLib.py +7 -2
  69. siliconcompiler/tools/yosys/scripts/sc_screenshot.tcl +1 -1
  70. siliconcompiler/tools/yosys/scripts/sc_synth_asic.tcl +40 -4
  71. siliconcompiler/tools/yosys/scripts/sc_synth_fpga.tcl +15 -5
  72. siliconcompiler/tools/yosys/syn_asic.py +62 -2
  73. siliconcompiler/tools/yosys/syn_fpga.py +8 -0
  74. siliconcompiler/toolscripts/_tools.json +6 -6
  75. siliconcompiler/utils/__init__.py +243 -51
  76. siliconcompiler/utils/curation.py +89 -56
  77. siliconcompiler/utils/issue.py +6 -1
  78. siliconcompiler/utils/multiprocessing.py +35 -2
  79. siliconcompiler/utils/paths.py +21 -0
  80. siliconcompiler/utils/settings.py +141 -0
  81. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/METADATA +5 -4
  82. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/RECORD +86 -83
  83. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/WHEEL +0 -0
  84. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/entry_points.txt +0 -0
  85. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/licenses/LICENSE +0 -0
  86. {siliconcompiler-0.35.2.dist-info → siliconcompiler-0.35.4.dist-info}/top_level.txt +0 -0
siliconcompiler/scheduler/schedulernode.py
@@ -9,7 +9,7 @@ import time
 
 import os.path
 
-from logging.handlers import QueueHandler
+from siliconcompiler.utils.multiprocessing import MPQueueHandler as QueueHandler
 
 from typing import List, Optional, Set, Tuple, TYPE_CHECKING
 
@@ -19,7 +19,7 @@ from siliconcompiler.utils.logging import get_console_formatter, SCInRunLoggerFo
 
 from siliconcompiler.package import Resolver
 from siliconcompiler.schema_support.record import RecordTime, RecordTool
-from siliconcompiler.schema import Journal
+from siliconcompiler.schema import Journal, Parameter
 from siliconcompiler.scheduler import send_messages
 from siliconcompiler.utils.paths import workdir, jobdir, collectiondir, cwdir
 
@@ -30,10 +30,36 @@ if TYPE_CHECKING:
     from siliconcompiler.schema_support.metric import MetricSchema
 
 
-class SchedulerFlowReset(Exception):
+class _SchedulerReset(Exception):
+    def __init__(self, msg: str, *args: object) -> None:
+        super().__init__(msg, *args)
+        self.__msg = msg
+
+    @property
+    def msg(self) -> str:
+        return self.__msg
+
+    def log(self, logger: logging.Logger) -> None:
+        logger.debug(self.msg)
+
+
+class SchedulerFlowReset(_SchedulerReset):
     pass
 
 
+class SchedulerNodeReset(_SchedulerReset):
+    def log(self, logger: logging.Logger) -> None:
+        logger.warning(self.msg)
+
+
+class SchedulerNodeResetSilent(SchedulerNodeReset):
+    def __init__(self, msg: str, *args: object) -> None:
+        super().__init__(msg, *args)
+
+    def log(self, logger: logging.Logger) -> None:
+        _SchedulerReset.log(self, logger)
+
+
 class SchedulerNode:
     """
     A class for managing and executing a single node in the compilation flow graph.
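Editor's note: the reset exceptions introduced above replace boolean return codes, and each subclass decides how loudly its reason is reported when caught (SchedulerNodeReset warns, SchedulerNodeResetSilent stays at debug level). Below is a minimal, self-contained sketch of that dispatch pattern; the run loop and simplified class names are illustrative, not the package's actual scheduler code.

import logging

# Illustrative re-creation of the hierarchy above; the surrounding loop is hypothetical.
class _Reset(Exception):
    def __init__(self, msg: str, *args: object) -> None:
        super().__init__(msg, *args)
        self._msg = msg

    def log(self, logger: logging.Logger) -> None:
        logger.debug(self._msg)          # quiet by default


class NodeReset(_Reset):
    def log(self, logger: logging.Logger) -> None:
        logger.warning(self._msg)        # user-visible reason for the re-run


class NodeResetSilent(NodeReset):
    def log(self, logger: logging.Logger) -> None:
        _Reset.log(self, logger)         # drop back to debug level


def requires_run(inputs_changed: bool) -> None:
    # Raise instead of returning True, mirroring the 0.35.4 refactor.
    if inputs_changed:
        raise NodeReset("inputs modified from previous run")
    raise NodeResetSilent("breakpoint is set")


logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
log = logging.getLogger("sc")
for changed in (True, False):
    try:
        requires_run(changed)
    except _Reset as reset:
        reset.log(log)   # each exception picks its own severity; the node re-runs either way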
@@ -41,9 +67,10 @@ class SchedulerNode:
     This class encapsulates the state and logic required to run a specific
     step and index, including setting up directories, handling file I/O,
     executing the associated tool, and recording results.
-
     """
 
+    __MAX_LOG_PRINT = 100  # Maximum number of warnings/error to print to log
+
     def __init__(self, project: "Project", step: str, index: str, replay: bool = False):
         """
         Initializes a SchedulerNode.
@@ -74,7 +101,6 @@ class SchedulerNode:
             self.__project.get("option", "fileset")[0],
             "topmodule")
 
-        self.__job: str = self.__project.get('option', 'jobname')
         self.__record_user_info: bool = self.__project.get(
             "option", "track", step=self.__step, index=self.__index)
         self.__pipe = None
@@ -90,24 +116,12 @@ class SchedulerNode:
         self.__enforce_inputfiles = True
         self.__enforce_outputfiles = True
 
+        self._update_job()
+
         flow: str = self.__project.get('option', 'flow')
         self.__is_entry_node: bool = (self.__step, self.__index) in \
             self.__project.get("flowgraph", flow, field="schema").get_entry_nodes()
 
-        self.__cwd = cwdir(self.__project)
-        self.__jobworkdir = jobdir(self.__project)
-        self.__workdir = workdir(self.__project, step=self.__step, index=self.__index)
-        self.__manifests = {
-            "input": os.path.join(self.__workdir, "inputs", f"{self.__name}.pkg.json"),
-            "output": os.path.join(self.__workdir, "outputs", f"{self.__name}.pkg.json")
-        }
-        self.__logs = {
-            "sc": os.path.join(self.__workdir, f"sc_{self.__step}_{self.__index}.log"),
-            "exe": os.path.join(self.__workdir, f"{self.__step}.log")
-        }
-        self.__replay_script = os.path.join(self.__workdir, "replay.sh")
-        self.__collection_path = collectiondir(self.__project)
-
         self.set_queue(None, None)
         self.__setup_schema_access()
 
@@ -120,10 +134,12 @@ class SchedulerNode:
         are directed to the correct task's schema.
         """
         prev_task = self.__task
-        with self.__task.runtime(self) as runtask:
-            self.__task = runtask
-            yield
-        self.__task = prev_task
+        try:
+            with self.__task.runtime(self) as runtask:
+                self.__task = runtask
+                yield
+        finally:
+            self.__task = prev_task
 
     @staticmethod
     def init(project: "Project") -> None:
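Editor's note: the try/finally above is a small but meaningful fix. With resets now signaled by exceptions raised inside the with-block, the old code could leave self.__task pointing at the runtime view. A generic sketch of the pattern, with hypothetical names rather than the package's Task.runtime():

from contextlib import contextmanager

class Node:
    def __init__(self):
        self.task = "setup-view"

    @contextmanager
    def runtime(self):
        prev_task = self.task
        try:
            self.task = "runtime-view"     # swap in the runtime task object
            yield self
        finally:
            self.task = prev_task          # restored even if the body raises

node = Node()
try:
    with node.runtime():
        raise RuntimeError("reset raised inside the with-block")
except RuntimeError:
    pass
# Without the finally, task would still be "runtime-view" here.
assert node.task == "setup-view"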
@@ -230,6 +246,22 @@ class SchedulerNode:
         """Task: The task object associated with this node."""
         return self.__task
 
+    def _update_job(self):
+        self.__job: str = self.__project.get('option', 'jobname')
+        self.__cwd = cwdir(self.__project)
+        self.__jobworkdir = jobdir(self.__project)
+        self.__workdir = workdir(self.__project, step=self.__step, index=self.__index)
+        self.__manifests = {
+            "input": os.path.join(self.__workdir, "inputs", f"{self.__name}.pkg.json"),
+            "output": os.path.join(self.__workdir, "outputs", f"{self.__name}.pkg.json")
+        }
+        self.__logs = {
+            "sc": os.path.join(self.__workdir, f"sc_{self.__step}_{self.__index}.log"),
+            "exe": os.path.join(self.__workdir, f"{self.__step}.log")
+        }
+        self.__replay_script = os.path.join(self.__workdir, "replay.sh")
+        self.__collection_path = collectiondir(self.__project)
+
     def get_manifest(self, input: bool = False) -> str:
         """
         Gets the path to the input or output manifest file for this node.
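Editor's note: collecting the job-derived state into _update_job() lets a node refresh its working directory, manifest, and log paths whenever ['option', 'jobname'] changes, rather than freezing them in __init__. A rough sketch of the idea, with hypothetical path helpers standing in for workdir()/jobdir():

import os

class NodePaths:
    def __init__(self, build_dir: str, design: str, job: str, step: str, index: str):
        self.build_dir, self.design = build_dir, design
        self.step, self.index = step, index
        self.update_job(job)

    def update_job(self, job: str) -> None:
        # Everything derived from the job name is recomputed in one place.
        self.job = job
        self.workdir = os.path.join(self.build_dir, self.design, job, self.step, self.index)
        self.manifests = {
            "input": os.path.join(self.workdir, "inputs", f"{self.design}.pkg.json"),
            "output": os.path.join(self.workdir, "outputs", f"{self.design}.pkg.json"),
        }
        self.logs = {
            "sc": os.path.join(self.workdir, f"sc_{self.step}_{self.index}.log"),
            "exe": os.path.join(self.workdir, f"{self.step}.log"),
        }

paths = NodePaths("build", "mydesign", "job0", "syn", "0")
paths.update_job("job1")     # e.g. the scheduler switches the node to a new job
assert "job1" in paths.manifests["output"]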
@@ -382,7 +414,7 @@ class SchedulerNode:
 
         return True
 
-    def check_previous_run_status(self, previous_run: "SchedulerNode") -> bool:
+    def check_previous_run_status(self, previous_run: "SchedulerNode") -> None:
         """
         Determine whether a prior run is compatible and completed successfully for use as
         an incremental build starting point.
@@ -406,25 +438,19 @@ class SchedulerNode:
 
         # Tool name
         if self.__task.tool() != previous_run.__task.tool():
-            self.logger.debug("Tool name changed")
-            return False
+            raise SchedulerNodeResetSilent("Tool name changed")
 
         # Task name
         if self.__task.task() != previous_run.__task.task():
-            self.logger.debug("Task name changed")
-            return False
+            raise SchedulerNodeResetSilent("Task name changed")
 
         previous_status = previous_run.__project.get("record", "status",
                                                      step=self.__step, index=self.__index)
         if not NodeStatus.is_done(previous_status):
-            self.logger.debug("Previous step did not complete")
-            # Not complete
-            return False
+            raise SchedulerNodeResetSilent("Previous step did not complete")
 
         if not NodeStatus.is_success(previous_status):
-            self.logger.debug("Previous step was not successful")
-            # Not a success
-            return False
+            raise SchedulerNodeResetSilent("Previous step was not successful")
 
         # Check input nodes
         log_level = self.logger.level
@@ -433,16 +459,11 @@ class SchedulerNode:
         self.logger.setLevel(log_level)
         if set(previous_run.__project.get("record", "inputnode",
                                           step=self.__step, index=self.__index)) != set(sel_inputs):
-            self.logger.warning(f'inputs to {self.__step}/{self.__index} has been modified from '
-                                'previous run')
-            return False
-
-        # Check that all output files are present?
-
-        return True
+            raise SchedulerNodeReset(f'inputs to {self.__step}/{self.__index} has been '
+                                     'modified from previous run')
 
     def check_values_changed(self, previous_run: "SchedulerNode", keys: Set[Tuple[str, ...]]) \
-            -> bool:
+            -> None:
         """
         Checks if any specified schema parameter values have changed.
 
@@ -453,15 +474,14 @@ class SchedulerNode:
         Returns:
             bool: True if any value has changed, False otherwise.
         """
-        def print_warning(key):
-            self.logger.warning(f'[{",".join(key)}] in {self.__step}/{self.__index} has been '
-                                'modified from previous run')
+        def gen_reset(key):
+            raise SchedulerNodeReset(f'[{",".join(key)}] in {self.__step}/{self.__index} has been '
+                                     'modified from previous run')
 
         for key in sorted(keys):
             if not self.__project.valid(*key) or not previous_run.__project.valid(*key):
                 # Key is missing in either run
-                print_warning(key)
-                return True
+                gen_reset(key)
 
             param = self.__project.get(*key, field=None)
             step, index = self.__step, self.__index
@@ -472,13 +492,10 @@ class SchedulerNode:
             prev_val = previous_run.__project.get(*key, step=step, index=index)
 
             if check_val != prev_val:
-                print_warning(key)
-                return True
-
-        return False
+                gen_reset(key)
 
     def check_files_changed(self, previous_run: "SchedulerNode",
-                            previous_time: float, keys: Set[Tuple[str, ...]]) -> bool:
+                            previous_time: float, keys: Set[Tuple[str, ...]]) -> None:
         """
         Checks if any specified file-based parameters have changed.
 
@@ -494,9 +511,9 @@ class SchedulerNode:
         """
        use_hash = self.__hash and previous_run.__hash
 
-        def print_warning(key, reason):
-            self.logger.warning(f'[{",".join(key)}] ({reason}) in {self.__step}/{self.__index} has '
-                                'been modified from previous run')
+        def gen_warning(key, reason):
+            raise SchedulerNodeReset(f'[{",".join(key)}] ({reason}) in {self.__step}/'
+                                     f'{self.__index} has been modified from previous run')
 
         def get_file_time(path):
             times = [os.path.getmtime(path)]
@@ -521,8 +538,7 @@ class SchedulerNode:
                                                      step=step, index=index)
 
                 if check_hash != prev_hash:
-                    print_warning(key, "file hash")
-                    return True
+                    gen_warning(key, "file hash")
             else:
                 # check package values
                 check_val = self.__project.get(*key, field='dataroot',
@@ -531,8 +547,7 @@ class SchedulerNode:
                                                    step=step, index=index)
 
                 if check_val != prev_val:
-                    print_warning(key, "file dataroot")
-                    return True
+                    gen_warning(key, "file dataroot")
 
                 files = self.__project.find_files(*key, step=step, index=index)
                 if not isinstance(files, (list, set, tuple)):
@@ -540,10 +555,7 @@ class SchedulerNode:
                     files = [files]
                 for check_file in files:
                     if get_file_time(check_file) > previous_time:
-                        print_warning(key, "timestamp")
-                        return True
-
-        return False
+                        gen_warning(key, "timestamp")
 
     def get_check_changed_keys(self) -> Tuple[Set[Tuple[str, ...]], Set[Tuple[str, ...]]]:
         """
@@ -585,7 +597,7 @@ class SchedulerNode:
 
         return value_keys, path_keys
 
-    def requires_run(self) -> bool:
+    def requires_run(self) -> None:
         """
         Determines if the node needs to be re-run.
 
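Editor's note: the incremental-build helpers above now raise on the first detected difference instead of returning booleans. check_files_changed, for instance, flags a file parameter when its recorded hash differs, when its dataroot changes, or when an on-disk modification time is newer than the previous run. A small self-contained sketch of the mtime fallback (hypothetical helper, not the package's implementation):

import os
import tempfile

def files_changed_since(paths, previous_time: float) -> bool:
    # True if any file (or anything inside a directory) is newer than the previous run.
    for path in paths:
        times = [os.path.getmtime(path)]
        if os.path.isdir(path):
            for root, _, files in os.walk(path):
                times.extend(os.path.getmtime(os.path.join(root, f)) for f in files)
        if max(times) > previous_time:
            return True
    return False

with tempfile.TemporaryDirectory() as tmp:
    src = os.path.join(tmp, "top.v")
    open(src, "w").close()
    previous_run = os.path.getmtime(src) + 10             # pretend the run finished after the edit
    assert not files_changed_since([src], previous_run)
    os.utime(src, (previous_run + 5, previous_run + 5))   # touch the source after the run
    assert files_changed_since([src], previous_run)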
@@ -601,8 +613,7 @@ class SchedulerNode:
 
         if self.__breakpoint:
             # Breakpoint is set to must run
-            self.logger.debug("Breakpoint is set")
-            return True
+            raise SchedulerNodeResetSilent(f"Breakpoint is set on {self.__step}/{self.__index}")
 
         # Load previous manifest
         previous_node = None
@@ -612,33 +623,27 @@ class SchedulerNode:
             try:
                 i_project: Project = Project.from_manifest(filepath=self.__manifests["input"])
             except:  # noqa E722
-                self.logger.debug("Input manifest failed to load")
-                return True
+                raise SchedulerNodeResetSilent("Input manifest failed to load")
             previous_node = SchedulerNode(i_project, self.__step, self.__index)
         else:
             # No manifest found so assume rerun is needed
-            self.logger.debug("Previous run did not generate input manifest")
-            return True
+            raise SchedulerNodeResetSilent("Previous run did not generate input manifest")
 
         previous_node_end = None
         if os.path.exists(self.__manifests["output"]):
             try:
                 o_project = Project.from_manifest(filepath=self.__manifests["output"])
             except:  # noqa E722
-                self.logger.debug("Output manifest failed to load")
-                return True
+                raise SchedulerNodeResetSilent("Output manifest failed to load")
             previous_node_end = SchedulerNode(o_project, self.__step, self.__index)
         else:
             # No manifest found so assume rerun is needed
-            self.logger.debug("Previous run did not generate output manifest")
-            return True
+            raise SchedulerNodeResetSilent("Previous run did not generate output manifest")
 
         with self.runtime():
             if previous_node_end:
                 with previous_node_end.runtime():
-                    if not self.check_previous_run_status(previous_node_end):
-                        self.logger.debug("Previous run state failed")
-                        return True
+                    self.check_previous_run_status(previous_node_end)
 
             if previous_node:
                 with previous_node.runtime():
@@ -650,18 +655,10 @@ class SchedulerNode:
                     value_keys.update(previous_value_keys)
                     path_keys.update(previous_path_keys)
                 except KeyError:
-                    self.logger.debug("Failed to acquire keys")
-                    return True
-
-            if self.check_values_changed(previous_node, value_keys.union(path_keys)):
-                self.logger.debug("Key values changed")
-                return True
+                    raise SchedulerNodeResetSilent("Failed to acquire keys")
 
-            if self.check_files_changed(previous_node, previous_node_time, path_keys):
-                self.logger.debug("Files changed")
-                return True
-
-        return False
+            self.check_values_changed(previous_node, value_keys.union(path_keys))
+            self.check_files_changed(previous_node, previous_node_time, path_keys)
 
     def setup_input_directory(self) -> None:
         """
@@ -1163,11 +1160,13 @@ class SchedulerNode:
         if 'errors' in checks:
             ordered_suffixes.append('errors')
 
+        print_paths = {}
        # Looping through patterns for each line
        with sc_open(self.__logs["exe"]) as f:
            line_count = sum(1 for _ in f)
            right_align = len(str(line_count))
            for suffix in ordered_suffixes:
+                print_paths[suffix] = False
                # Start at the beginning of file again
                f.seek(0)
                for num, line in enumerate(f, start=1):
@@ -1176,7 +1175,7 @@ class SchedulerNode:
                        if string is None:
                            break
                    else:
-                        string = utils.grep(self.__project, item, string)
+                        string = utils.grep(self.__project.logger, item, string)
                    if string is not None:
                        matches[suffix] += 1
                        # always print to file
@@ -1184,11 +1183,21 @@ class SchedulerNode:
                        print(line_with_num, file=checks[suffix]['report'])
                        # selectively print to display
                        if checks[suffix]["display"]:
-                            checks[suffix]["display"](suffix, line_with_num)
+                            if matches[suffix] <= SchedulerNode.__MAX_LOG_PRINT:
+                                checks[suffix]["display"](suffix, line_with_num)
+                            else:
+                                if not print_paths[suffix]:
+                                    checks[suffix]["display"](suffix, "print limit reached")
+                                    print_paths[suffix] = True
 
         for check in checks.values():
             check['report'].close()
 
+        for suffix in ordered_suffixes:
+            if print_paths[suffix]:
+                self.logger.info(f"All {suffix} can be viewed at: "
+                                 f"{os.path.abspath(f'{self.__step}.{suffix}')}")
+
         for metric in ("errors", "warnings"):
             if metric in matches:
                 value = self.__metrics.get(metric, step=self.__step, index=self.__index)
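Editor's note: the __MAX_LOG_PRINT cap keeps the console readable when a tool emits thousands of matching lines. Every match is still written to the per-step report file, but only the first 100 are echoed to the display, followed by a single "print limit reached" notice and a pointer to the full report. A condensed sketch of that pattern (hypothetical regex, step name, and report file):

import re

MAX_LOG_PRINT = 3                  # the package uses 100
pattern = re.compile(r"WARNING")   # hypothetical match pattern

log_lines = [f"WARNING: issue {i}" for i in range(10)]

matches = 0
limit_reported = False
with open("syn.warnings", "w") as report:
    for num, line in enumerate(log_lines, start=1):
        if not pattern.search(line):
            continue
        matches += 1
        line_with_num = f"{num}: {line}"
        print(line_with_num, file=report)          # always goes to the report file
        if matches <= MAX_LOG_PRINT:
            print(line_with_num)                   # echoed to the display
        elif not limit_reported:
            print("print limit reached")           # announced exactly once
            limit_reported = True

if limit_reported:
    print(f"All warnings can be viewed at: syn.warnings ({matches} total)")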
@@ -1377,3 +1386,100 @@ class SchedulerNode:
         for logfile in self.__logs.values():
             if os.path.isfile(logfile):
                 tar.add(logfile, arcname=arcname(logfile))
+
+    def get_required_keys(self) -> Set[Tuple[str, ...]]:
+        """
+        This function walks through the 'require' keys and returns the
+        keys.
+        """
+        path_keys = set()
+        with self.runtime():
+            task = self.task
+            for key in task.get('require'):
+                path_keys.add(tuple(key.split(",")))
+            if task.has_prescript():
+                path_keys.add((*task._keypath, "prescript"))
+            if task.has_postscript():
+                path_keys.add((*task._keypath, "postscript"))
+            if task.get("refdir"):
+                path_keys.add((*task._keypath, "refdir"))
+            if task.get("script"):
+                path_keys.add((*task._keypath, "script"))
+            if task.get("exe"):
+                path_keys.add((*task._keypath, "exe"))
+
+        return path_keys
+
+    def get_required_path_keys(self) -> Set[Tuple[str, ...]]:
+        """
+        This function walks through the 'require' keys and returns the
+        keys that are of type path (file/dir).
+        """
+        path_keys = set()
+        for key in self.get_required_keys():
+            try:
+                param_type: str = self.__project.get(*key, field="type")
+                if "file" in param_type or "dir" in param_type:
+                    path_keys.add(key)
+            except KeyError:
+                # Key does not exist
+                pass
+
+        return path_keys
+
+    def mark_copy(self) -> bool:
+        """Marks files from the 'require' path keys for copying."""
+        return False
+
+    def check_required_values(self) -> bool:
+        requires = self.get_required_keys()
+
+        error = False
+        for key in sorted(requires):
+            if not self.__project.valid(*key):
+                self.logger.error(f'Cannot resolve required keypath [{",".join(key)}] '
+                                  f'for {self.step}/{self.index}.')
+                error = True
+                continue
+
+            param: Parameter = self.__project.get(*key, field=None)
+            check_step, check_index = self.step, self.index
+            if param.get(field='pernode').is_never():
+                check_step, check_index = None, None
+
+            if not param.has_value(step=check_step, index=check_index):
+                self.logger.error('No value set for required keypath '
+                                  f'[{",".join(key)}] for {self.step}/{self.index}.')
+                error = True
+                continue
+        return not error
+
+    def check_required_paths(self) -> bool:
+        if self.__project.option.get_remote():
+            return True
+
+        requires = self.get_required_path_keys()
+
+        error = False
+        for key in sorted(requires):
+            param: Parameter = self.__project.get(*key, field=None)
+            check_step, check_index = self.step, self.index
+            if param.get(field='pernode').is_never():
+                check_step, check_index = None, None
+
+            abspath = self.__project.find_files(*key,
+                                                missing_ok=True,
+                                                step=check_step, index=check_index)
+
+            unresolved_paths = param.get(step=check_step, index=check_index)
+            if not isinstance(abspath, list):
+                abspath = [abspath]
+                unresolved_paths = [unresolved_paths]
+
+            for path, setpath in zip(abspath, unresolved_paths):
+                if path is None:
+                    self.logger.error(f'Cannot resolve path {setpath} in '
+                                      f'required file keypath [{",".join(key)}] '
+                                      f'for {self.step}/{self.index}.')
+                    error = True
+        return not error
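Editor's note: the new get_required_keys()/check_required_values()/check_required_paths() helpers give the node a pre-flight check. Every keypath listed in the task's 'require' parameter must resolve, carry a value, and, for file/dir-typed parameters, point at something that exists, so configuration problems surface before the tool launches (path checks are skipped for remote runs). A hedged sketch of that pattern against a plain dict-backed configuration, not the schema API:

import os

# Hypothetical dict-backed stand-in for the schema: keypath -> (type, value)
config = {
    ("option", "flow"): ("str", "asicflow"),
    ("input", "rtl", "verilog"): ("[file]", ["rtl/top.v"]),
    ("tool", "yosys", "task", "syn_asic", "exe"): ("file", None),
}

def check_required_values(require):
    ok = True
    for key in require:
        if key not in config:
            print(f"Cannot resolve required keypath [{','.join(key)}]")
            ok = False
        elif config[key][1] is None:
            print(f"No value set for required keypath [{','.join(key)}]")
            ok = False
    return ok

def check_required_paths(require):
    ok = True
    for key in require:
        ptype, value = config.get(key, ("", None))
        if "file" not in ptype and "dir" not in ptype:
            continue                       # only path-typed parameters are checked
        for path in value or []:
            if not os.path.exists(path):
                print(f"Cannot resolve path {path} in required file keypath "
                      f"[{','.join(key)}]")
                ok = False
    return ok

require = list(config)
if not (check_required_values(require) and check_required_paths(require)):
    print("node would be halted before the tool is launched")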