siliconcompiler 0.34.1__py3-none-any.whl → 0.34.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. siliconcompiler/__init__.py +23 -4
  2. siliconcompiler/__main__.py +1 -7
  3. siliconcompiler/_metadata.py +1 -1
  4. siliconcompiler/apps/_common.py +104 -23
  5. siliconcompiler/apps/sc.py +4 -8
  6. siliconcompiler/apps/sc_dashboard.py +6 -4
  7. siliconcompiler/apps/sc_install.py +10 -6
  8. siliconcompiler/apps/sc_issue.py +7 -5
  9. siliconcompiler/apps/sc_remote.py +1 -1
  10. siliconcompiler/apps/sc_server.py +9 -14
  11. siliconcompiler/apps/sc_show.py +7 -6
  12. siliconcompiler/apps/smake.py +130 -94
  13. siliconcompiler/apps/utils/replay.py +4 -7
  14. siliconcompiler/apps/utils/summarize.py +3 -5
  15. siliconcompiler/asic.py +420 -0
  16. siliconcompiler/checklist.py +25 -2
  17. siliconcompiler/cmdlineschema.py +534 -0
  18. siliconcompiler/constraints/__init__.py +17 -0
  19. siliconcompiler/constraints/asic_component.py +378 -0
  20. siliconcompiler/constraints/asic_floorplan.py +449 -0
  21. siliconcompiler/constraints/asic_pins.py +489 -0
  22. siliconcompiler/constraints/asic_timing.py +517 -0
  23. siliconcompiler/core.py +10 -35
  24. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +8 -0
  25. siliconcompiler/dependencyschema.py +96 -202
  26. siliconcompiler/design.py +327 -241
  27. siliconcompiler/filesetschema.py +250 -0
  28. siliconcompiler/flowgraph.py +298 -106
  29. siliconcompiler/fpga.py +124 -1
  30. siliconcompiler/library.py +331 -0
  31. siliconcompiler/metric.py +327 -92
  32. siliconcompiler/metrics/__init__.py +7 -0
  33. siliconcompiler/metrics/asic.py +245 -0
  34. siliconcompiler/metrics/fpga.py +220 -0
  35. siliconcompiler/package/__init__.py +391 -67
  36. siliconcompiler/package/git.py +92 -16
  37. siliconcompiler/package/github.py +114 -22
  38. siliconcompiler/package/https.py +79 -16
  39. siliconcompiler/packageschema.py +341 -16
  40. siliconcompiler/pathschema.py +255 -0
  41. siliconcompiler/pdk.py +566 -1
  42. siliconcompiler/project.py +1460 -0
  43. siliconcompiler/record.py +38 -1
  44. siliconcompiler/remote/__init__.py +5 -2
  45. siliconcompiler/remote/client.py +11 -6
  46. siliconcompiler/remote/schema.py +5 -23
  47. siliconcompiler/remote/server.py +41 -54
  48. siliconcompiler/report/__init__.py +3 -3
  49. siliconcompiler/report/dashboard/__init__.py +48 -14
  50. siliconcompiler/report/dashboard/cli/__init__.py +99 -21
  51. siliconcompiler/report/dashboard/cli/board.py +364 -179
  52. siliconcompiler/report/dashboard/web/__init__.py +90 -12
  53. siliconcompiler/report/dashboard/web/components/__init__.py +219 -240
  54. siliconcompiler/report/dashboard/web/components/flowgraph.py +49 -26
  55. siliconcompiler/report/dashboard/web/components/graph.py +139 -100
  56. siliconcompiler/report/dashboard/web/layouts/__init__.py +29 -1
  57. siliconcompiler/report/dashboard/web/layouts/_common.py +38 -2
  58. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph.py +39 -26
  59. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_node_tab.py +50 -50
  60. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_sac_tabs.py +49 -46
  61. siliconcompiler/report/dashboard/web/state.py +141 -14
  62. siliconcompiler/report/dashboard/web/utils/__init__.py +79 -16
  63. siliconcompiler/report/dashboard/web/utils/file_utils.py +74 -11
  64. siliconcompiler/report/dashboard/web/viewer.py +25 -1
  65. siliconcompiler/report/report.py +5 -2
  66. siliconcompiler/report/summary_image.py +29 -11
  67. siliconcompiler/scheduler/__init__.py +9 -1
  68. siliconcompiler/scheduler/docker.py +81 -4
  69. siliconcompiler/scheduler/run_node.py +37 -20
  70. siliconcompiler/scheduler/scheduler.py +211 -36
  71. siliconcompiler/scheduler/schedulernode.py +394 -60
  72. siliconcompiler/scheduler/send_messages.py +77 -29
  73. siliconcompiler/scheduler/slurm.py +76 -12
  74. siliconcompiler/scheduler/taskscheduler.py +142 -21
  75. siliconcompiler/schema/__init__.py +0 -4
  76. siliconcompiler/schema/baseschema.py +338 -59
  77. siliconcompiler/schema/editableschema.py +14 -6
  78. siliconcompiler/schema/journal.py +28 -17
  79. siliconcompiler/schema/namedschema.py +22 -14
  80. siliconcompiler/schema/parameter.py +89 -28
  81. siliconcompiler/schema/parametertype.py +2 -0
  82. siliconcompiler/schema/parametervalue.py +258 -15
  83. siliconcompiler/schema/safeschema.py +25 -2
  84. siliconcompiler/schema/schema_cfg.py +23 -19
  85. siliconcompiler/schema/utils.py +2 -2
  86. siliconcompiler/schema_obj.py +24 -5
  87. siliconcompiler/tool.py +1131 -265
  88. siliconcompiler/tools/bambu/__init__.py +41 -0
  89. siliconcompiler/tools/builtin/concatenate.py +2 -2
  90. siliconcompiler/tools/builtin/minimum.py +2 -1
  91. siliconcompiler/tools/builtin/mux.py +2 -1
  92. siliconcompiler/tools/builtin/nop.py +2 -1
  93. siliconcompiler/tools/builtin/verify.py +2 -1
  94. siliconcompiler/tools/klayout/__init__.py +95 -0
  95. siliconcompiler/tools/openroad/__init__.py +289 -0
  96. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +3 -0
  97. siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +7 -2
  98. siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +8 -4
  99. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +9 -5
  100. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +5 -1
  101. siliconcompiler/tools/slang/__init__.py +1 -1
  102. siliconcompiler/tools/slang/elaborate.py +2 -1
  103. siliconcompiler/tools/vivado/scripts/sc_run.tcl +1 -1
  104. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +8 -1
  105. siliconcompiler/tools/vivado/syn_fpga.py +6 -0
  106. siliconcompiler/tools/vivado/vivado.py +35 -2
  107. siliconcompiler/tools/vpr/__init__.py +150 -0
  108. siliconcompiler/tools/yosys/__init__.py +369 -1
  109. siliconcompiler/tools/yosys/scripts/procs.tcl +0 -1
  110. siliconcompiler/toolscripts/_tools.json +5 -10
  111. siliconcompiler/utils/__init__.py +66 -0
  112. siliconcompiler/utils/flowgraph.py +2 -2
  113. siliconcompiler/utils/issue.py +2 -1
  114. siliconcompiler/utils/logging.py +14 -0
  115. siliconcompiler/utils/multiprocessing.py +256 -0
  116. siliconcompiler/utils/showtools.py +10 -0
  117. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/METADATA +6 -6
  118. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/RECORD +122 -115
  119. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/entry_points.txt +3 -0
  120. siliconcompiler/schema/cmdlineschema.py +0 -250
  121. siliconcompiler/schema/packageschema.py +0 -101
  122. siliconcompiler/toolscripts/rhel8/install-slang.sh +0 -40
  123. siliconcompiler/toolscripts/rhel9/install-slang.sh +0 -40
  124. siliconcompiler/toolscripts/ubuntu20/install-slang.sh +0 -47
  125. siliconcompiler/toolscripts/ubuntu22/install-slang.sh +0 -37
  126. siliconcompiler/toolscripts/ubuntu24/install-slang.sh +0 -37
  127. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/WHEEL +0 -0
  128. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/licenses/LICENSE +0 -0
  129. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/top_level.txt +0 -0
siliconcompiler/tool.py CHANGED
@@ -10,16 +10,18 @@ import shlex
  import shutil
  import subprocess
  import sys
+ import threading
  import time
  import yaml

  try:
+ # 'resource' is not available on Windows, so we handle its absence gracefully.
  import resource
  except ModuleNotFoundError:
  resource = None

  try:
- # Note: this import throws exception on Windows
+ # 'pty' is not available on Windows.
  import pty
  except ModuleNotFoundError:
  pty = None
@@ -29,7 +31,9 @@ import os.path
  from packaging.version import Version, InvalidVersion
  from packaging.specifiers import SpecifierSet, InvalidSpecifier

- from siliconcompiler.schema import NamedSchema, Journal
+ from typing import List, Dict, Tuple, Union
+
+ from siliconcompiler.schema import BaseSchema, NamedSchema, Journal
  from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
  from siliconcompiler.schema.parametertype import NodeType
  from siliconcompiler.schema.utils import trim
@@ -39,34 +43,60 @@ from siliconcompiler import sc_open
  from siliconcompiler import Schema

  from siliconcompiler.record import RecordTool
+ from siliconcompiler.scheduler import SchedulerNode
  from siliconcompiler.flowgraph import RuntimeFlowgraph


  class TaskError(Exception):
- '''
- Error indicates execution cannot continue and should be terminated
- '''
+ '''Error indicating that task execution cannot continue and should be terminated.'''
+ pass


  class TaskTimeout(TaskError):
- '''
- Error indicates a timeout has occurred
+ '''Error indicating a timeout has occurred during task execution.

  Args:
- timeout (float): execution time at timeout
+ timeout (float): The execution time in seconds at which the timeout occurred.
  '''
+
  def __init__(self, *args, timeout=None, **kwargs):
  super().__init__(*args, **kwargs)
  self.timeout = timeout


  class TaskExecutableNotFound(TaskError):
- '''
- Executable not found.
- '''
+ '''Error indicating that the required tool executable could not be found.'''
+ pass
+
+
+ class TaskSkip(TaskError):
+ """
+ Error raised to indicate that the current task should be skipped.
+
+ This exception is only intended to be used within the `setup()` and
+ `pre_process()` methods of a Task.
+ """
+
+ def __init__(self, why: str, *args):
+ super().__init__(why, *args)
+ self.__why = why
+
+ @property
+ def why(self):
+ """str: The reason why the task is being skipped."""
+ return self.__why


  class TaskSchema(NamedSchema):
+ """
+ A schema class that defines the parameters and methods for a single task
+ in a compilation flow.
+
+ This class provides the framework for setting up, running, and post-processing
+ a tool. It includes methods for managing executables, versions, runtime
+ arguments, and file I/O.
+ """
+ # Regex for parsing version check strings like ">=1.2.3"
  __parse_version_check_str = r"""
  (?P<operator>(==|!=|<=|>=|<|>|~=))
  \s*
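For orientation, a minimal usage sketch of the new TaskSkip exception, which the docstring above restricts to a task's setup() or pre_process() hooks. The LintTask class, its tool name, and the emptiness check are illustrative assumptions, not code from the package:

    from siliconcompiler.tool import TaskSchema, TaskSkip

    class LintTask(TaskSchema):          # hypothetical task driver
        def tool(self):
            return "examplelint"         # hypothetical tool name

        def pre_process(self):
            # Raise TaskSkip so the node is treated as skipped rather than failed
            # (per the TaskSkip docstring above); the condition here is illustrative.
            if not self.get("input"):
                raise TaskSkip("no input files to lint")
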
@@ -82,52 +112,100 @@ class TaskSchema(NamedSchema):
  r"^\s*" + __parse_version_check_str + r"\s*$",
  re.VERBOSE | re.IGNORECASE)

- def __init__(self, name=None):
+ def __init__(self):
  super().__init__()
- self.set_name(name)

  schema_task(self)

  self.__set_runtime(None)

+ @classmethod
+ def _getdict_type(cls) -> str:
+ """Returns the metadata for getdict."""
+ return TaskSchema.__name__
+
+ def _from_dict(self, manifest, keypath, version=None):
+ """
+ Populates the schema from a dictionary, dynamically adding 'var'
+ parameters found in the manifest that are not already defined.
+ """
+ if "var" in manifest:
+ # Collect existing and manifest var keys
+ var_keys = [k[0] for k in self.allkeys("var")]
+ manifest_keys = set(manifest["var"].keys())
+
+ # Add new vars found in the manifest to the schema
+ edit = EditableSchema(self)
+ for var in sorted(manifest_keys.difference(var_keys)):
+ edit.insert("var", var,
+ Parameter.from_dict(
+ manifest["var"][var],
+ keypath=keypath + [var],
+ version=version))
+ del manifest["var"][var]
+
+ if not manifest["var"]:
+ del manifest["var"]
+
+ return super()._from_dict(manifest, keypath, version)
+
  @contextlib.contextmanager
- def runtime(self, chip, step=None, index=None, relpath=None):
- '''
- Sets the runtime information needed to properly execute a task.
- Note: unstable API
+ def runtime(self, node, step=None, index=None, relpath=None):
+ """
+ A context manager to set the runtime information for a task.
+
+ This method creates a temporary copy of the task object with runtime
+ information (like the current step, index, and working directories)
+ populated from a SchedulerNode. This allows methods within the context
+ to access runtime-specific configuration and paths.

  Args:
- chip (:class:`Chip`): root schema for the runtime information
- '''
+ node (SchedulerNode): The scheduler node for this runtime context.
+ """
+ if node and not isinstance(node, SchedulerNode):
+ raise TypeError("node must be a scheduler node")
+
  obj_copy = copy.copy(self)
- obj_copy.__set_runtime(chip, step=step, index=index, relpath=relpath)
+ obj_copy.__set_runtime(node, step=step, index=index, relpath=relpath)
  yield obj_copy

- def __set_runtime(self, chip, step=None, index=None, relpath=None):
- '''
- Sets the runtime information needed to properly execute a task.
- Note: unstable API
+ def __set_runtime(self, node: SchedulerNode, step=None, index=None, relpath=None):
+ """
+ Private helper to set the runtime information for executing a task.

  Args:
- chip (:class:`Chip`): root schema for the runtime information
- '''
+ node (SchedulerNode): The scheduler node for this runtime.
+ """
+ self.__node = node
  self.__chip = None
  self.__schema_full = None
  self.__logger = None
  self.__design_name = None
  self.__design_top = None
+ self.__design_top_global = None
  self.__cwd = None
  self.__relpath = relpath
- if chip:
- self.__chip = chip
- self.__schema_full = chip.schema
- self.__logger = chip.logger
- self.__design_name = chip.design
- self.__design_top = chip.top()
- self.__cwd = chip.cwd
-
- self.__step = step
- self.__index = index
+ self.__collection_path = None
+ self.__jobdir = None
+ if node:
+ if step is not None or index is not None:
+ raise RuntimeError("step and index cannot be provided with node")
+
+ self.__chip = node.chip
+ self.__schema_full = node.chip.schema
+ self.__logger = node.chip.logger
+ self.__design_name = node.name
+ self.__design_top = node.topmodule
+ self.__design_top_global = node.topmodule_global
+ self.__cwd = node.project_cwd
+ self.__collection_path = node.collection_dir
+ self.__jobdir = node.workdir
+
+ self.__step = node.step
+ self.__index = node.index
+ else:
+ self.__step = step
+ self.__index = index

  self.__schema_record = None
  self.__schema_metric = None
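Since runtime() now takes a SchedulerNode rather than a Chip, a rough sketch of how the scheduler side might drive a task through the context manager. The task and node objects here are assumed to be supplied by the scheduler; they are not constructed by hand in this sketch:

    # Sketch only: `task` is a TaskSchema instance, `node` a SchedulerNode.
    with task.runtime(node) as rt:
        rt.setup_work_directory(rt.nodeworkdir, remove_exist=False)
        env = rt.get_runtime_environmental_variables()
        cmd = rt.get_runtime_arguments()
        rt.logger.info(f"step={rt.step} index={rt.index} cmd={cmd}")
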
@@ -157,47 +235,63 @@ class TaskSchema(NamedSchema):
  from_steps=set([step for step, _ in self.__schema_flow.get_entry_nodes()]),
  prune_nodes=self.__schema_full.get('option', 'prune'))

- def node(self):
- '''
- Returns:
- step and index for the current runtime
- '''
-
- return self.__step, self.__index
-
- def tool(self):
- '''
- Returns:
- tool name
- '''
-
+ @property
+ def design_name(self) -> str:
+ """str: The name of the design."""
+ return self.__design_name
+
+ @property
+ def design_topmodule(self) -> str:
+ """str: The top module of the design for the current node."""
+ return self.__design_top
+
+ @property
+ def node(self) -> SchedulerNode:
+ """SchedulerNode: The scheduler node for the current runtime."""
+ return self.__node
+
+ @property
+ def step(self) -> str:
+ """str: The step for the current runtime."""
+ return self.__step
+
+ @property
+ def index(self) -> str:
+ """str: The index for the current runtime."""
+ return self.__index
+
+ def tool(self) -> str:
+ """str: The name of the tool associated with this task."""
  raise NotImplementedError("tool name must be implemented by the child class")

- def task(self):
- '''
- Returns:
- task name
- '''
-
+ def task(self) -> str:
+ """str: The name of this task."""
+ if self.name:
+ return self.name
  raise NotImplementedError("task name must be implemented by the child class")

- def logger(self):
- '''
- Returns:
- logger
- '''
+ @property
+ def logger(self) -> logging.Logger:
+ """logging.Logger: The logger instance."""
  return self.__logger

+ @property
+ def nodeworkdir(self) -> str:
+ """str: The path to the node's working directory."""
+ return self.__jobdir
+
  def schema(self, type=None):
- '''
- Get useful section of the schema.
+ """
+ Gets a specific section of the schema.

  Args:
- type (str): schema section to find, if None returns the root schema.
+ type (str, optional): The schema section to retrieve. If None,
+ returns the root schema. Valid types include "record",
+ "metric", "flow", "runtimeflow", and "tool".

  Returns:
- schema section.
- '''
+ The requested schema section object.
+ """
  if type is None:
  return self.__schema_full
  elif type == "record":
@@ -213,23 +307,44 @@ class TaskSchema(NamedSchema):
  else:
  raise ValueError(f"{type} is not a schema section")

- def get_exe(self):
- '''
- Determines the absolute path for the specified executable.
+ def get_logpath(self, log: str) -> str:
+ """
+ Returns the relative path to a specified log file.
+
+ Args:
+ log (str): The type of log file (e.g., 'exe', 'sc').
+
+ Returns:
+ str: The relative path to the log file from the node's workdir.
+ """
+ return os.path.relpath(self.__node.get_log(log), self.__jobdir)
+
+ def has_breakpoint(self) -> bool:
+ """
+ Checks if a breakpoint is set for this task.
+
+ Returns:
+ bool: True if a breakpoint is active, False otherwise.
+ """
+ return self.schema().get("option", "breakpoint", step=self.__step, index=self.__index)
+
+ def get_exe(self) -> str:
+ """
+ Determines the absolute path for the task's executable.

  Raises:
- :class:`TaskExecutableNotFound`: if executable not found.
+ TaskExecutableNotFound: If the executable cannot be found in the system PATH.

  Returns:
- path to executable, or None if not specified
- '''
+ str: The absolute path to the executable, or None if not specified.
+ """

  exe = self.schema("tool").get('exe')

  if exe is None:
  return None

- # Collect path
+ # Collect PATH from environment variables
  env = self.get_runtime_environmental_variables(include_path=True)

  fullexe = shutil.which(exe, path=env["PATH"])
@@ -239,17 +354,17 @@ class TaskSchema(NamedSchema):

  return fullexe

- def get_exe_version(self):
- '''
- Gets the version of the specified executable.
+ def get_exe_version(self) -> str:
+ """
+ Gets the version of the task's executable by running it with a version switch.

  Raises:
- :class:`TaskExecutableNotFound`: if executable not found.
- :class:`NotImplementedError`: if :meth:`.parse_version` has not be implemented.
+ TaskExecutableNotFound: If the executable is not found.
+ NotImplementedError: If the `parse_version` method is not implemented.

  Returns:
- version determined by :meth:`.parse_version`.
- '''
+ str: The parsed version string.
+ """

  veropt = self.schema("tool").get('vswitch')
  if not veropt:
@@ -292,22 +407,21 @@ class TaskSchema(NamedSchema):

  return version

- def check_exe_version(self, reported_version):
- '''
- Check if the reported version matches the versions specified in
- :keypath:`tool,<tool>,version`.
+ def check_exe_version(self, reported_version) -> bool:
+ """
+ Checks if the reported version of a tool satisfies the requirements
+ specified in the schema.

  Args:
- reported_version (str): version to check
+ reported_version (str): The version string reported by the tool.

  Returns:
- True if the version matched, false otherwise
-
- '''
+ bool: True if the version is acceptable, False otherwise.
+ """

  spec_sets = self.schema("tool").get('version', step=self.__step, index=self.__index)
  if not spec_sets:
- # No requirement so always true
+ # No requirement, so always true
  return True

  for spec_set in spec_sets:
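get_exe_version() and check_exe_version() rely on a task-defined parse_version() and on specifiers stored under the tool's version key. A hedged sketch of the pattern; the tool name, the version-string format, and the parsing logic are illustrative assumptions:

    class ExampleTask(TaskSchema):       # hypothetical task driver
        def tool(self):
            return "exampletool"

        def parse_version(self, stdout):
            # e.g. "exampletool v2.1.0 (build 77)" -> "2.1.0"
            return stdout.split()[1].lstrip("v")

    # A driver would typically also register acceptable versions, e.g.:
    #   task.add_version(">=2.0")   # add_version() appears later in this diff
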
@@ -367,22 +481,22 @@ class TaskSchema(NamedSchema):
  return False

  def get_runtime_environmental_variables(self, include_path=True):
- '''
- Determine the environmental variables needed for the task
+ """
+ Determines the environment variables needed for the task.

  Args:
- include_path (bool): if True, includes PATH variable
+ include_path (bool): If True, includes the PATH variable.

  Returns:
- dict of str: dictionary of environmental variable to value mapping
- '''
+ dict: A dictionary of environment variable names to their values.
+ """

  # Add global environmental vars
  envvars = {}
  for env in self.__schema_full.getkeys('option', 'env'):
  envvars[env] = self.__schema_full.get('option', 'env', env)

- # Add tool specific vars
+ # Add tool-specific license server vars
  for lic_env in self.schema("tool").getkeys('licenseserver'):
  license_file = self.schema("tool").get('licenseserver', lic_env,
  step=self.__step, index=self.__index)
@@ -392,8 +506,8 @@ class TaskSchema(NamedSchema):
  if include_path:
  path = self.schema("tool").find_files(
  "path", step=self.__step, index=self.__index,
- packages=self.schema().get("package", field="schema").get_resolvers(),
  cwd=self.__cwd,
+ collection_dir=self.__collection_path,
  missing_ok=True)

  envvars["PATH"] = os.getenv("PATH", os.defpath)
@@ -401,31 +515,32 @@ class TaskSchema(NamedSchema):
  if path:
  envvars["PATH"] = path + os.pathsep + envvars["PATH"]

- # Forward additional variables
+ # Forward additional variables like LD_LIBRARY_PATH
  for var in ('LD_LIBRARY_PATH',):
  val = os.getenv(var, None)
  if val:
  envvars[var] = val

- # Add task specific vars
+ # Add task-specific vars
  for env in self.getkeys("env"):
  envvars[env] = self.get("env", env)

  return envvars

  def get_runtime_arguments(self):
- '''
- Constructs the arguments needed to run the task.
+ """
+ Constructs the command-line arguments needed to run the task.

  Returns:
- command (list)
- '''
+ list: A list of command-line arguments.
+ """

  cmdargs = []
  try:
  if self.__relpath:
  args = []
  for arg in self.runtime_options():
+ arg = str(arg)
  if os.path.isabs(arg) and os.path.exists(arg):
  args.append(os.path.relpath(arg, self.__relpath))
  else:
@@ -444,14 +559,14 @@ class TaskSchema(NamedSchema):
  return cmdargs

  def generate_replay_script(self, filepath, workdir, include_path=True):
- '''
- Generate a replay script for the task.
+ """
+ Generates a shell script to replay the task's execution.

  Args:
- filepath (path): path to the file to write
- workdir (path): path to the run work directory
- include_path (bool): include path information in environmental variables
- '''
+ filepath (str): The path to write the replay script to.
+ workdir (str): The path to the run's working directory.
+ include_path (bool): If True, includes PATH information.
+ """
  replay_opts = {}
  replay_opts["work_dir"] = workdir
  replay_opts["exports"] = self.get_runtime_environmental_variables(include_path=include_path)
@@ -466,10 +581,8 @@ class TaskSchema(NamedSchema):
  if vswitch:
  replay_opts["version_flag"] = shlex.join(vswitch)

- # detect arguments
+ # Regex to detect arguments and file paths for formatting
  arg_test = re.compile(r'^[-+]')
-
- # detect file paths
  file_test = re.compile(r'^[/\.]')

  if replay_opts["executable"]:
@@ -493,7 +606,7 @@ class TaskSchema(NamedSchema):
  format_cmd = []
  replay_opts["cmds"] = format_cmd

- # create replay file
+ # Create replay file from template
  with open(filepath, 'w') as f:
  f.write(utils.get_file_template("replay/replay.sh.j2").render(replay_opts))
  f.write("\n")
@@ -501,25 +614,26 @@ class TaskSchema(NamedSchema):
  os.chmod(filepath, 0o755)

  def setup_work_directory(self, workdir, remove_exist=True):
- '''
- Create the runtime directories needed to execute a task.
+ """
+ Creates the runtime directories needed to execute a task.

  Args:
- workdir (path): path to the run work directory
- remove_exist (bool): if True, removes the existing directory
- '''
+ workdir (str): The path to the node's working directory.
+ remove_exist (bool): If True, removes the directory if it already exists.
+ """

- # Delete existing directory
+ # Delete existing directory if requested
  if os.path.isdir(workdir) and remove_exist:
  shutil.rmtree(workdir)

- # Create directories
+ # Create standard subdirectories
  os.makedirs(workdir, exist_ok=True)
  os.makedirs(os.path.join(workdir, 'inputs'), exist_ok=True)
  os.makedirs(os.path.join(workdir, 'outputs'), exist_ok=True)
  os.makedirs(os.path.join(workdir, 'reports'), exist_ok=True)

  def __write_yaml_manifest(self, fout, manifest):
+ """Private helper to write a manifest in YAML format."""
  class YamlIndentDumper(yaml.Dumper):
  def increase_indent(self, flow=False, indentless=False):
  return super().increase_indent(flow=flow, indentless=indentless)
@@ -527,24 +641,51 @@ class TaskSchema(NamedSchema):
  fout.write(yaml.dump(manifest.getdict(), Dumper=YamlIndentDumper,
  default_flow_style=False))

+ def get_tcl_variables(self, manifest: BaseSchema = None) -> Dict[str, str]:
+ """
+ Gets a dictionary of variables to define for the task in a Tcl manifest.
+
+ Args:
+ manifest (BaseSchema, optional): The manifest to retrieve values from.
+
+ Returns:
+ dict: A dictionary of variable names and their Tcl-formatted values.
+ """
+
+ if manifest is None:
+ manifest = self.schema()
+
+ vars = {
+ "sc_tool": NodeType.to_tcl(self.tool(), "str"),
+ "sc_task": NodeType.to_tcl(self.task(), "str"),
+ "sc_topmodule": NodeType.to_tcl(self.design_topmodule, "str")
+ }
+
+ refdir = manifest.get("tool", self.tool(), "task", self.task(), "refdir", field=None)
+ if refdir.get(step=self.__step, index=self.__index):
+ vars["sc_refdir"] = refdir.gettcl(step=self.__step, index=self.__index)
+
+ return vars
+
  def __write_tcl_manifest(self, fout, manifest):
+ """Private helper to write a manifest in Tcl format."""
  template = utils.get_file_template('tcl/manifest.tcl.j2')
  tcl_set_cmds = []
  for key in sorted(manifest.allkeys()):
- # print out all non default values
+ # Skip default values
  if 'default' in key:
  continue

  param = manifest.get(*key, field=None)

- # create a TCL dict
+ # Create a Tcl dict key string
  keystr = ' '.join([NodeType.to_tcl(keypart, 'str') for keypart in key])

  valstr = param.gettcl(step=self.__step, index=self.__index)
  if valstr is None:
  continue

- # Ensure empty values get something
+ # Ensure empty values are represented as empty Tcl lists
  if valstr == '':
  valstr = '{}'

@@ -553,7 +694,8 @@ class TaskSchema(NamedSchema):
  if template:
  fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
  scroot=os.path.abspath(
- os.path.join(os.path.dirname(__file__))),
+ os.path.join(os.path.dirname(__file__))),
+ toolvars=self.get_tcl_variables(manifest),
  record_access="get" in Journal.access(self).get_types(),
  record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
  else:
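The new get_tcl_variables() hook feeds the toolvars argument seen in the template call above, alongside the updated tcl/manifest.tcl.j2. A sketch of what it returns inside a runtime context; the concrete values, and their exact Tcl quoting, are hypothetical:

    with task.runtime(node) as rt:       # `node` assumed from the scheduler
        print(rt.get_tcl_variables())
        # e.g. {'sc_tool': 'yosys', 'sc_task': 'syn_asic',
        #       'sc_topmodule': 'heartbeat'}   # values illustrative only
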
@@ -562,6 +704,7 @@ class TaskSchema(NamedSchema):
  fout.write('\n')

  def __write_csv_manifest(self, fout, manifest):
+ """Private helper to write a manifest in CSV format."""
  csvwriter = csv.writer(fout)
  csvwriter.writerow(['Keypath', 'Value'])

@@ -580,13 +723,13 @@ class TaskSchema(NamedSchema):
  csvwriter.writerow([keypath, value])

  def write_task_manifest(self, directory, backup=True):
- '''
- Write the manifest needed for the task
+ """
+ Writes the manifest needed for the task in the format specified by the tool.

  Args:
- directory (path): directory to write the manifest into.
- backup (bool): if True and an existing manifest is found a backup is kept.
- '''
+ directory (str): The directory to write the manifest into.
+ backup (bool): If True, backs up an existing manifest.
+ """

  suffix = self.schema("tool").get('format')
  if not suffix:
@@ -597,19 +740,17 @@ class TaskSchema(NamedSchema):
  if backup and os.path.exists(manifest_path):
  shutil.copyfile(manifest_path, f'{manifest_path}.bak')

- # Generate abs paths
+ # Generate a schema with absolute paths for the manifest
  schema = self.__abspath_schema()

  if re.search(r'\.json(\.gz)?$', manifest_path):
  schema.write_manifest(manifest_path)
  else:
  try:
- # format specific dumping
+ # Format-specific dumping
  if manifest_path.endswith('.gz'):
  fout = gzip.open(manifest_path, 'wt', encoding='UTF-8')
  elif re.search(r'\.csv$', manifest_path):
- # Files written using csv library should be opened with newline=''
- # https://docs.python.org/3/library/csv.html#id3
  fout = open(manifest_path, 'w', newline='')
  else:
  fout = open(manifest_path, 'w')
@@ -626,6 +767,10 @@ class TaskSchema(NamedSchema):
  fout.close()

  def __abspath_schema(self):
+ """
+ Private helper to create a copy of the schema with all file/dir paths
+ converted to absolute paths.
+ """
  root = self.schema()
  schema = root.copy()

@@ -635,7 +780,6 @@ class TaskSchema(NamedSchema):
  for keypath in root.allkeys():
  paramtype = schema.get(*keypath, field='type')
  if 'file' not in paramtype and 'dir' not in paramtype:
- # only do something if type is file or dir
  continue

  for value, step, index in root.get(*keypath, field=None).getvalues():
@@ -643,7 +787,6 @@ class TaskSchema(NamedSchema):
  continue
  abspaths = root.find_files(*keypath, missing_ok=True, step=step, index=index)
  if isinstance(abspaths, (set, list)) and None in abspaths:
- # Lists may not contain None
  schema.set(*keypath, [], step=step, index=index)
  else:
  if self.__relpath:
@@ -658,12 +801,12 @@ class TaskSchema(NamedSchema):
  return schema

  def __get_io_file(self, io_type):
- '''
- Get the runtime destination for the io type.
+ """
+ Private helper to get the runtime destination for stdout or stderr.

  Args:
- io_type (str): name of io type
- '''
+ io_type (str): The I/O type ('stdout' or 'stderr').
+ """
  suffix = self.get(io_type, "suffix")
  destination = self.get(io_type, "destination")

@@ -673,26 +816,24 @@ class TaskSchema(NamedSchema):
  io_file = f"{self.__step}.{suffix}"
  io_log = True
  elif destination == 'output':
- io_file = os.path.join('outputs', f"{self.__design_top}.{suffix}")
+ io_file = os.path.join('outputs', f"{self.__design_top_global}.{suffix}")
  elif destination == 'none':
  io_file = os.devnull

  return io_file, io_log

  def __terminate_exe(self, proc):
- '''
- Terminates a subprocess
+ """
+ Private helper to terminate a subprocess and its children.

  Args:
- proc (subprocess.Process): process to terminate
- '''
+ proc (subprocess.Process): The process to terminate.
+ """

  def terminate_process(pid, timeout=3):
- '''Terminates a process and all its (grand+)children.
-
+ """Terminates a process and all its (grand+)children.
  Based on https://psutil.readthedocs.io/en/latest/#psutil.wait_procs and
- https://psutil.readthedocs.io/en/latest/#kill-process-tree.
- '''
+ https://psutil.readthedocs.io/en/latest/#kill-process-tree."""
  parent = psutil.Process(pid)
  children = parent.children(recursive=True)
  children.append(parent)
@@ -700,13 +841,10 @@ class TaskSchema(NamedSchema):
  try:
  p.terminate()
  except psutil.NoSuchProcess:
- # Process may have terminated on its own in the meantime
  pass

  _, alive = psutil.wait_procs(children, timeout=timeout)
  for p in alive:
- # If processes are still alive after timeout seconds, send more
- # aggressive signal.
  p.kill()

  TERMINATE_TIMEOUT = 5
@@ -722,37 +860,35 @@ class TaskSchema(NamedSchema):
  terminate_process(proc.pid, timeout=TERMINATE_TIMEOUT)

  def run_task(self, workdir, quiet, loglevel, breakpoint, nice, timeout):
- '''
- Run the task.
+ """
+ Executes the task's main process.

- Raises:
- :class:`TaskError`: raised if the task failed to complete and
- should not be considered complete.
- :class:`TaskTimeout`: raised if the task reaches a timeout
+ This method handles the full lifecycle of running the tool, including
+ setting up the work directory, writing manifests, redirecting I/O,
+ monitoring for timeouts, and recording metrics.

  Args:
- workdir (path): path to the run work directory
- quiet (bool): if True, execution output is suppressed
- loglevel (str): logging level
- breakpoint (bool): if True, will attempt to execute with a breakpoint
- nice (int): POSIX nice level to use in execution
- timeout (int): timeout to use for execution
+ workdir (str): The path to the node's working directory.
+ quiet (bool): If True, suppresses execution output.
+ loglevel (str): The logging level.
+ breakpoint (bool): If True, attempts to run with a breakpoint.
+ nice (int): The POSIX nice level for the process.
+ timeout (int): The execution timeout in seconds.

  Returns:
- return code from the execution
- '''
+ int: The return code from the execution.
+ """

- # TODO: Currently no memory usage tracking in breakpoints, builtins, or unexpected errors.
  max_mem_bytes = 0
  cpu_start = time.time()

- # Ensure directories are setup
+ # Ensure directories are set up
  self.setup_work_directory(workdir, remove_exist=False)

- # Write task manifest
+ # Write task-specific manifest
  self.write_task_manifest(workdir)

- # Get file IO
+ # Get file I/O destinations
  stdout_file, is_stdout_log = self.__get_io_file("stdout")
  stderr_file, is_stderr_log = self.__get_io_file("stderr")

@@ -763,6 +899,7 @@ class TaskSchema(NamedSchema):
  stderr_print = self.__logger.error

  def read_stdio(stdout_reader, stderr_reader):
+ """Helper to read and print stdout/stderr streams."""
  if quiet:
  return

@@ -777,16 +914,16 @@ class TaskSchema(NamedSchema):

  retcode = 0
  if not exe:
- # No executable, so must call run()
+ # No executable defined, so call the Python `run()` method
  try:
  with open(stdout_file, 'w') as stdout_writer, \
- open(stderr_file, 'w') as stderr_writer:
+ open(stderr_file, 'w') as stderr_writer:
  if stderr_file == stdout_file:
  stderr_writer.close()
  stderr_writer = sys.stdout

  with contextlib.redirect_stderr(stderr_writer), \
- contextlib.redirect_stdout(stdout_writer):
+ contextlib.redirect_stdout(stdout_writer):
  retcode = self.run()
  except Exception as e:
  self.__logger.error(f'Failed in run() for {self.tool()}/{self.task()}: {e}')
@@ -794,22 +931,22 @@ class TaskSchema(NamedSchema):
  raise e
  finally:
  with sc_open(stdout_file) as stdout_reader, \
- sc_open(stderr_file) as stderr_reader:
+ sc_open(stderr_file) as stderr_reader:
  read_stdio(stdout_reader, stderr_reader)

  if resource:
  try:
- # Since memory collection is not possible, collect the current process
- # peak memory
+ # Collect peak memory usage of the current process
  max_mem_bytes = max(
  max_mem_bytes,
  1024 * resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
  except (OSError, ValueError, PermissionError):
  pass
  else:
+ # An executable is defined, run it as a subprocess
  cmdlist = self.get_runtime_arguments()

- # Make record of tool options
+ # Record tool options
  self.schema("record").record_tool(
  self.__step, self.__index,
  cmdlist, RecordTool.ARGS)
@@ -817,18 +954,10 @@ class TaskSchema(NamedSchema):
  self.__logger.info(shlex.join([os.path.basename(exe), *cmdlist]))

  if not pty and breakpoint:
- # pty not available
  breakpoint = False

  if breakpoint and sys.platform in ('darwin', 'linux'):
- # When we break on a step, the tool often drops into a shell.
- # However, our usual subprocess scheme seems to break terminal
- # echo for some tools. On POSIX-compatible systems, we can use
- # pty to connect the tool to our terminal instead. This code
- # doesn't handle quiet/timeout logic, since we don't want either
- # of these features for an interactive session. Logic for
- # forwarding to file based on
- # https://docs.python.org/3/library/pty.html#example.
+ # Use pty for interactive breakpoint sessions on POSIX systems
  with open(f"{self.__step}.log", 'wb') as log_writer:
  def read(fd):
  data = os.read(fd, 1024)
@@ -836,12 +965,11 @@ class TaskSchema(NamedSchema):
  return data
  retcode = pty.spawn([exe, *cmdlist], read)
  else:
+ # Standard subprocess execution
  with open(stdout_file, 'w') as stdout_writer, \
- open(stdout_file, 'r', errors='replace') as stdout_reader, \
- open(stderr_file, 'w') as stderr_writer, \
- open(stderr_file, 'r', errors='replace') as stderr_reader:
- # if STDOUT and STDERR are to be redirected to the same file,
- # use a single writer
+ open(stdout_file, 'r', errors='replace') as stdout_reader, \
+ open(stderr_file, 'w') as stderr_writer, \
+ open(stderr_file, 'r', errors='replace') as stderr_reader:
  if stderr_file == stdout_file:
  stderr_writer.close()
  stderr_reader.close()
@@ -863,13 +991,11 @@ class TaskSchema(NamedSchema):
  except Exception as e:
  raise TaskError(f"Unable to start {exe}: {str(e)}")

- # How long to wait for proc to quit on ctrl-c before force
- # terminating.
  POLL_INTERVAL = 0.1
  MEMORY_WARN_LIMIT = 90
  try:
  while proc.poll() is None:
- # Gather subprocess memory usage.
+ # Monitor subprocess memory usage
  try:
  pproc = psutil.Process(proc.pid)
  proc_mem_bytes = pproc.memory_full_info().uss
@@ -882,8 +1008,6 @@ class TaskSchema(NamedSchema):
  self.__logger.warning(
  'Current system memory usage is '
  f'{memory_usage.percent:.1f}%')
-
- # increase limit warning
  MEMORY_WARN_LIMIT = int(memory_usage.percent + 1)
  except psutil.Error:
  # Process may have already terminated or been killed.
@@ -894,9 +1018,9 @@ class TaskSchema(NamedSchema):
  # be collected
  pass

- # Loop until process terminates
  read_stdio(stdout_reader, stderr_reader)

+ # Check for timeout
  duration = time.time() - cpu_start
  if timeout is not None and duration > timeout:
  raise TaskTimeout(timeout=duration)
@@ -911,17 +1035,16 @@ class TaskSchema(NamedSchema):
  self.__terminate_exe(proc)
  raise e from None

- # Read the remaining io
+ # Read any remaining I/O
  read_stdio(stdout_reader, stderr_reader)

  retcode = proc.returncode

- # Record record information
+ # Record metrics
  self.schema("record").record_tool(
  self.__step, self.__index,
  retcode, RecordTool.EXITCODE)

- # Capture runtime metrics
  self.schema("metric").record(
  self.__step, self.__index,
  'exetime', time.time() - cpu_start, unit='s')
@@ -932,45 +1055,40 @@ class TaskSchema(NamedSchema):
  return retcode

  def __getstate__(self):
+ """Custom state for pickling, removing runtime info."""
  state = self.__dict__.copy()
-
- # Remove runtime information
  for key in list(state.keys()):
  if key.startswith("_TaskSchema__"):
  del state[key]
-
  return state

  def __setstate__(self, state):
+ """Custom state for unpickling, re-initializing runtime info."""
  self.__dict__ = state
-
- # Reinit runtime information
  self.__set_runtime(None)

  def get_output_files(self):
+ """Gets the set of output files defined for this task."""
  return set(self.get("output"))

  def get_files_from_input_nodes(self):
  """
- Returns a dictionary of files with the node they originated from
+ Returns a dictionary of files from input nodes, mapped to the node
+ they originated from.
  """
-
  nodes = self.schema("runtimeflow").get_nodes()
-
  inputs = {}
- for in_step, in_index in self.schema("flow").get(*self.node(), 'input'):
+ for in_step, in_index in self.schema("flow").get(self.step, self.index, 'input'):
  if (in_step, in_index) not in nodes:
- # node has been pruned so will not provide anything
  continue

  in_tool = self.schema("flow").get(in_step, in_index, "tool")
  in_task = self.schema("flow").get(in_step, in_index, "task")
-
  task_obj = self.schema().get("tool", in_tool, "task", in_task, field="schema")

  if self.schema("record").get('status', step=in_step, index=in_index) == \
  NodeStatus.SKIPPED:
- with task_obj.runtime(self.__chip, step=in_step, index=in_index) as task:
+ with task_obj.runtime(self.__node.switch_node(in_step, in_index)) as task:
  for file, nodes in task.get_files_from_input_nodes().items():
  inputs.setdefault(file, []).extend(nodes)
  continue
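The final hunk below adds a family of convenience setters (set_exe, add_version, add_input_file, add_output_file, set_threads, record_metric, and others). A hedged sketch of how a task driver might combine them; the setup() and post_process() hooks, the tool name, and all concrete values are illustrative assumptions rather than package code:

    class ExampleTask(TaskSchema):       # hypothetical task driver
        def tool(self):
            return "exampletool"

        def setup(self):
            self.set_exe(exe="exampletool", vswitch=["--version"], format="tcl")
            self.add_version(">=2.0")
            self.set_threads()               # defaults to the machine's core count
            self.add_input_file(ext="v")     # expands to <topmodule>.v
            self.add_output_file(ext="vg")

        def post_process(self):
            # record_metric() is defined in the hunk below; values are illustrative.
            self.record_metric("cellarea", 500.0, "reports/metrics.json",
                               source_unit="um^2")
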
@@ -982,110 +1100,840 @@ class TaskSchema(NamedSchema):
982
1100
 
983
1101
  def compute_input_file_node_name(self, filename, step, index):
984
1102
  """
985
- Generate a unique name for in input file based on the originating node.
1103
+ Generates a unique name for an input file based on its originating node.
986
1104
 
987
1105
  Args:
988
- filename (str): name of inputfile
989
- step (str): Step name
990
- index (str): Index name
1106
+ filename (str): The original name of the input file.
1107
+ step (str): The step name of the originating node.
1108
+ index (str): The index of the originating node.
991
1109
  """
992
-
993
1110
  _, file_type = os.path.splitext(filename)
994
-
995
1111
  if file_type:
996
1112
  base = filename
997
1113
  total_ext = []
998
1114
  while file_type:
999
1115
  base, file_type = os.path.splitext(base)
1000
1116
  total_ext.append(file_type)
1001
-
1002
1117
  total_ext.reverse()
1003
-
1004
1118
  return f'{base}.{step}{index}{"".join(total_ext)}'
1005
1119
  else:
1006
1120
  return f'{filename}.{step}{index}'
1007
1121
 
1008
- def add_parameter(self, name, type, help, defvalue=None):
1009
- '''
1010
- Adds a parameter to the task definition.
1122
+ def add_parameter(self, name, type, help, defvalue=None, **kwargs):
1123
+ """
1124
+ Adds a custom parameter ('var') to the task definition.
1011
1125
 
1012
1126
  Args:
1013
- name (str): name of parameter
1014
- type (str): schema type of the parameter
1015
- help (str): help string for this parameter
1016
- defvalue (any): default value for the parameter
1017
- '''
1127
+ name (str): The name of the parameter.
1128
+ type (str): The schema type of the parameter.
1129
+ help (str): The help string for the parameter.
1130
+ defvalue: The default value for the parameter.
1131
+ """
1018
1132
  help = trim(help)
1019
1133
  param = Parameter(
1020
1134
  type,
1135
+ **kwargs,
1021
1136
  defvalue=defvalue,
1022
1137
  scope=Scope.JOB,
1023
1138
  pernode=PerNode.OPTIONAL,
1024
1139
  shorthelp=help,
1025
1140
  help=help
1026
1141
  )
1027
-
1028
1142
  EditableSchema(self).insert("var", name, param)
1029
-
1030
1143
  return param
1031
1144
 
1032
1145
  ###############################################################
1033
- def get(self, *keypath, field='value'):
1034
- return super().get(*keypath, field=field,
1035
- step=self.__step, index=self.__index)
1146
+ # Task settings
1147
+ ###############################################################
1148
+ def add_required_tool_key(self, *key: str, step: str = None, index: str = None):
1149
+ '''
1150
+ Adds a required tool keypath to the task driver.
1036
1151
 
1037
- def set(self, *args, field='value', clobber=True):
1038
- return super().set(*args, field=field, clobber=clobber,
1039
- step=self.__step, index=self.__index)
1152
+ Args:
1153
+ key (list of str): required key path
1154
+ '''
1155
+ return self.add_required_key(self, *key, step=step, index=index)
1040
1156
 
1041
- def add(self, *args, field='value'):
1042
- return super().add(*args, field=field, step=self.__step, index=self.__index)
1157
+ def add_required_key(self, obj: Union[BaseSchema, str], *key: str,
1158
+ step: str = None, index: str = None):
1159
+ '''
1160
+ Adds a required keypath to the task driver.
1161
+
1162
+ Args:
1163
+ obj (:class:`BaseSchema` or str): if this is a string it will be considered
1164
+ part of the key, otherwise the keypath to the obj will be prepended to
1165
+ the key
1166
+ key (list of str): required key path
1167
+ '''
1168
+
1169
+ if isinstance(obj, BaseSchema):
1170
+ key = (*obj._keypath, *key)
1171
+ else:
1172
+ key = (obj, *key)
1173
+
1174
+ if any([not isinstance(k, str) for k in key]):
1175
+ raise ValueError("key can only contain strings")
1176
+
1177
+ return self.add("require", ",".join(key), step=step, index=index)
1178
+
1179
+ def set_threads(self, max_threads: int = None,
1180
+ step: str = None, index: str = None,
1181
+ clobber: bool = False):
1182
+ """
1183
+ Sets the requested thread count for the task
1184
+
1185
+ Args:
1186
+ max_threads (int): if provided the requested thread count
1187
+ will be set this value, otherwise the current machines
1188
+ core count will be used.
1189
+ clobber (bool): overwrite existing value
1190
+ """
1191
+ if max_threads is None or max_threads <= 0:
1192
+ max_threads = utils.get_cores(None)
1193
+
1194
+ return self.set("threads", max_threads, step=step, index=index, clobber=clobber)
1195
+
1196
+ def get_threads(self, step: str = None, index: str = None) -> int:
1197
+ """
1198
+ Returns the number of threads requested.
1199
+ """
1200
+ return self.get("threads", step=step, index=index)
1201
+
1202
+ def add_commandline_option(self, option: Union[List[str], str],
1203
+ step: str = None, index: str = None,
1204
+ clobber: bool = False):
1205
+ """
1206
+ Add to the command line options for the task
1207
+
1208
+ Args:
1209
+ option (list of str or str): options to add to the commandline
1210
+ clobber (bool): overwrite existing value
1211
+ """
1212
+
1213
+ if clobber:
1214
+ return self.set("option", option, step=step, index=index)
1215
+ else:
1216
+ return self.add("option", option, step=step, index=index)
1217
+
1218
+ def get_commandline_options(self, step: str = None, index: str = None) -> List[str]:
1219
+ """
1220
+ Returns the command line options specified
1221
+ """
1222
+ return self.get("option", step=step, index=index)
1223
+
1224
+ def add_input_file(self, file: str = None, ext: str = None,
1225
+ step: str = None, index: str = None,
1226
+ clobber: bool = False):
1227
+ """
1228
+ Add a required input file from the previous step in the flow.
1229
+ file and ext are mutually exclusive.
1230
+
1231
+ Args:
1232
+ file (str): full filename
1233
+ ext (str): file extension, if specified, the filename will be <top>.<ext>
1234
+ clobber (bool): overwrite existing value
1235
+ """
1236
+ if file and ext:
1237
+ raise ValueError("only file or ext can be specified")
1238
+
1239
+ if ext:
1240
+ file = f"{self.design_topmodule}.{ext}"
1241
+
1242
+ if clobber:
1243
+ return self.set("input", file, step=step, index=index)
1244
+ else:
1245
+ return self.add("input", file, step=step, index=index)
1246
+
1247
+ def add_output_file(self, file: str = None, ext: str = None,
1248
+ step: str = None, index: str = None,
1249
+ clobber: bool = False):
1250
+ """
1251
+ Add an output file that this task will produce
1252
+ file and ext are mutually exclusive.
1253
+
1254
+ Args:
1255
+ file (str): full filename
1256
+ ext (str): file extension, if specified, the filename will be <top>.<ext>
1257
+ clobber (bool): overwrite existing value
1258
+ """
1259
+ if file and ext:
1260
+ raise ValueError("only file or ext can be specified")
1261
+
1262
+ if ext:
1263
+ file = f"{self.design_topmodule}.{ext}"
1264
+
1265
+ if clobber:
1266
+ return self.set("output", file, step=step, index=index)
1267
+ else:
1268
+ return self.add("output", file, step=step, index=index)
1269
+
1270
+ def set_environmentalvariable(self, name: str, value: str,
1271
+ step: str = None, index: str = None,
1272
+ clobber: bool = False):
1273
+ return self.set("env", name, value, step=step, index=index, clobber=clobber)
1274
+
1275
+ def add_prescript(self, script: str, dataroot: str = None,
1276
+ step: str = None, index: str = None,
1277
+ clobber: bool = False):
1278
+ if not dataroot:
1279
+ dataroot = self._get_active("package")
1280
+ with self._active(package=dataroot):
1281
+ if clobber:
1282
+ return self.set("prescript", script, step=step, index=index)
1283
+ else:
1284
+ return self.add("prescript", script, step=step, index=index)
1285
+
1286
+ def add_postscript(self, script: str, dataroot: str = None,
1287
+ step: str = None, index: str = None,
1288
+ clobber: bool = False):
1289
+ if not dataroot:
1290
+ dataroot = self._get_active("package")
1291
+ with self._active(package=dataroot):
1292
+ if clobber:
1293
+ return self.set("postscript", script, step=step, index=index)
1294
+ else:
1295
+ return self.add("postscript", script, step=step, index=index)
1296
+
1297
+ def has_prescript(self, step: str = None, index: str = None) -> bool:
1298
+ if self.get("prescript", step=step, index=index):
1299
+ return True
1300
+ return False
1301
+
1302
+ def has_postscript(self, step: str = None, index: str = None) -> bool:
1303
+ if self.get("postscript", step=step, index=index):
1304
+ return True
1305
+ return False
1306
+
1307
+ def set_refdir(self, dir: str, dataroot: str = None,
1308
+ step: str = None, index: str = None,
1309
+ clobber: bool = False):
1310
+ if not dataroot:
1311
+ dataroot = self._get_active("package")
1312
+ with self._active(package=dataroot):
1313
+ return self.set("refdir", dir, step=step, index=index, clobber=clobber)
1314
+
1315
+ def set_script(self, script: str, dataroot: str = None,
1316
+ step: str = None, index: str = None,
1317
+ clobber: bool = False):
1318
+ if not dataroot:
1319
+ dataroot = self._get_active("package")
1320
+ with self._active(package=dataroot):
1321
+ return self.set("script", script, step=step, index=index, clobber=clobber)
1322
+
1323
+ def add_regex(self, type: str, regex: str,
1324
+ step: str = None, index: str = None,
1325
+ clobber: bool = False):
1326
+ if clobber:
1327
+ return self.set("regex", type, regex, step=step, index=index)
1328
+ else:
1329
+ return self.add("regex", type, regex, step=step, index=index)
1330
+
1331
+ def set_logdestination(self, type: str, dest: str, suffix: str = None,
1332
+ step: str = None, index: str = None,
1333
+ clobber: bool = False):
1334
+ rets = []
1335
+ rets.append(self.set(type, "destination", dest, step=step, index=index, clobber=clobber))
1336
+ if suffix:
1337
+ rets.append(self.set(type, "suffix", suffix, step=step, index=index, clobber=clobber))
1338
+ return rets
1339
+
1340
+ def add_warningoff(self, type: str, step: str = None, index: str = None, clobber: bool = False):
1341
+ if clobber:
1342
+ return self.set("warningoff", type, step=step, index=index)
1343
+ else:
1344
+ return self.add("warningoff", type, step=step, index=index)
1345
+
1346
+ ###############################################################
1347
+ # Tool settings
1348
+ ###############################################################
1349
+ def set_exe(self, exe: str = None, vswitch: List[str] = None, format: str = None,
1350
+ step: str = None, index: str = None,
1351
+ clobber: bool = False):
1352
+ rets = []
1353
+ if exe:
1354
+ rets.append(self.schema("tool").set("exe", exe, clobber=clobber))
1355
+ if vswitch:
1356
+ switches = self.add_vswitch(vswitch, clobber=clobber)
1357
+ if not isinstance(switches, list):
1358
+ switches = list(switches)
1359
+ rets.extend(switches)
1360
+ if format:
1361
+ rets.append(self.schema("tool").set("format", format, clobber=clobber))
1362
+ return rets
1363
+
1364
+ def set_path(self, path: str, dataroot: str = None,
1365
+ step: str = None, index: str = None,
1366
+ clobber: bool = False):
1367
+ if not dataroot:
1368
+ dataroot = self.schema("tool")._get_active("package")
1369
+ with self.schema("tool")._active(package=dataroot):
1370
+ return self.schema("tool").set("path", path, step=step, index=index, clobber=clobber)
1371
+
1372
+ def add_version(self, version: str, step: str = None, index: str = None, clobber: bool = False):
1373
+ if clobber:
1374
+ return self.schema("tool").set("version", version, step=step, index=index)
1375
+ else:
1376
+ return self.schema("tool").add("version", version, step=step, index=index)
1377
+
1378
+ def add_vswitch(self, switch: str, clobber: bool = False):
1379
+ if clobber:
1380
+ return self.schema("tool").set("vswitch", switch)
1381
+ else:
1382
+ return self.schema("tool").add("vswitch", switch)
1383
+
1384
+ def add_licenseserver(self, name: str, server: str,
1385
+ step: str = None, index: str = None,
1386
+ clobber: bool = False):
1387
+ if clobber:
1388
+ return self.schema("tool").set("licenseserver", name, server, step=step, index=index)
1389
+ else:
1390
+ return self.schema("tool").add("licenseserver", name, server, step=step, index=index)
1391
+
1392
+ def add_sbom(self, version: str, sbom: str, dataroot: str = None, clobber: bool = False):
1393
+ if not dataroot:
1394
+ dataroot = self.schema("tool")._get_active("package")
1395
+ with self.schema("tool")._active(package=dataroot):
1396
+ if clobber:
1397
+ return self.schema("tool").set("sbom", version, sbom)
1398
+ else:
1399
+ return self.schema("tool").add("sbom", version, sbom)
1400
+
1401
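Taken together, the tool-settings helpers cover the executable, its version probing, and license bookkeeping. A minimal sketch with placeholder tool, version, and server values:

class MyTask(TaskSchema):                      # hypothetical, for illustration only
    def setup(self):
        super().setup()
        self.set_exe(exe="openroad", vswitch=["-version"], format="tcl")
        self.add_version(">=2.0", clobber=False)   # accepted version specifier (placeholder)
        self.add_licenseserver("acme", "27020@license.example.com")  # placeholder server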
+ def record_metric(self, metric, value, source_file=None, source_unit=None, quiet=False):
1402
+ '''
1403
+ Records a metric and associates the source file with it.
1404
+
1405
+ Args:
1406
+ metric (str): metric to record
1407
+ value (float/int): value of the metric that is being recorded
1408
+ source_file (str): file the value came from
1409
+ source_unit (str): unit of the value; if not provided, the value is assumed to be unitless
1410
+ quiet (bool): don't generate a warning if the metric is not recognized
1411
+
1412
+ Examples:
1413
+ >>> self.record_metric('cellarea', 500.0, 'reports/metrics.json', \\
1414
+ source_unit='um^2')
1415
+ Records the 'cellarea' metric and notes its source as 'reports/metrics.json'.
1416
+ '''
1417
+
1418
+ if metric not in self.schema("metric").getkeys():
1419
+ if not quiet:
1420
+ self.logger.warning(f"{metric} is not a valid metric")
1421
+ return
1422
+
1423
+ self.schema("metric").record(self.__step, self.__index, metric, value, unit=source_unit)
1424
+ if source_file:
1425
+ self.add("report", metric, source_file)
1426
+
1427
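In practice, `record_metric()` is typically called from a task's `post_process()` after parsing a report. A minimal sketch, assuming 'cellarea' is defined in the metric schema:

class MyTask(TaskSchema):                      # hypothetical, for illustration only
    def post_process(self):
        super().post_process()
        # Record a value parsed from a report and associate the report with it.
        self.record_metric("cellarea", 512.5,
                           source_file="reports/metrics.json",
                           source_unit="um^2")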
+ def get_fileset_file_keys(self, filetype: str) -> List[Tuple[NamedSchema, Tuple[str]]]:
1428
+ """
1429
+ Collect the keys that reference files of a particular filetype across all filesets.
1430
+
1431
+ Args:
1432
+ filetype (str): Name of the filetype
1433
+
1434
+ Returns:
1435
+ list of (object, keypath)
1436
+ """
1437
+ if not isinstance(filetype, str):
1438
+ raise TypeError("filetype must be a string")
1439
+
1440
+ keys = []
1441
+ for obj, fileset in self.schema().get_filesets():
1442
+ key = ("fileset", fileset, "file", filetype)
1443
+ if obj.valid(*key, check_complete=True):
1444
+ keys.append((obj, key))
1445
+ return keys
1043
1446
 
1447
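The returned (object, keypath) pairs can be queried directly on the owning schema objects. A small sketch, assuming 'verilog' is a registered filetype and `task` is a TaskSchema instance:

# Illustrative only: list the values stored for every verilog file key.
for obj, keypath in task.get_fileset_file_keys("verilog"):
    print(keypath, obj.get(*keypath))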
+ ###############################################################
1448
+ # Schema
1449
+ ###############################################################
1450
+ def get(self, *keypath, field='value', step: str = None, index: str = None):
1451
+ if not step:
1452
+ step = self.__step
1453
+ if not index:
1454
+ index = self.__index
1455
+ return super().get(*keypath, field=field, step=step, index=index)
1456
+
1457
+ def set(self, *args, field='value', step: str = None, index: str = None, clobber=True):
1458
+ if not step:
1459
+ step = self.__step
1460
+ if not index:
1461
+ index = self.__index
1462
+ return super().set(*args, field=field, clobber=clobber, step=step, index=index)
1463
+
1464
+ def add(self, *args, field='value', step: str = None, index: str = None):
1465
+ if not step:
1466
+ step = self.__step
1467
+ if not index:
1468
+ index = self.__index
1469
+ return super().add(*args, field=field, step=step, index=index)
1470
+
1471
+ def unset(self, *args, step: str = None, index: str = None):
1472
+ if not step:
1473
+ step = self.__step
1474
+ if not index:
1475
+ index = self.__index
1476
+ return super().unset(*args, step=step, index=index)
1477
+
1478
+ def find_files(self, *keypath, missing_ok=False, step=None, index=None):
1479
+ if not step:
1480
+ step = self.__step
1481
+ if not index:
1482
+ index = self.__index
1483
+ return super().find_files(*keypath, missing_ok=missing_ok,
1484
+ step=step, index=index,
1485
+ collection_dir=self.__collection_path,
1486
+ cwd=self.__cwd)
1487
+
1488
+ def _find_files_search_paths(self, keypath, step, index):
1489
+ paths = super()._find_files_search_paths(keypath, step, index)
1490
+ if keypath == "script":
1491
+ paths.extend(self.find_files("refdir", step=step, index=index))
1492
+ elif keypath == "input":
1493
+ paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
1494
+ "inputs"))
1495
+ elif keypath == "report":
1496
+ paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
1497
+ "report"))
1498
+ elif keypath == "output":
1499
+ paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
1500
+ "outputs"))
1501
+ return paths
1502
+
1503
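These overrides make plain `get`/`set`/`add` calls inside a running task default to the node currently being executed, while an explicit step/index still wins. A short sketch in a hypothetical task method:

class MyTask(TaskSchema):                      # hypothetical, for illustration only
    def pre_process(self):
        super().pre_process()
        self.add("option", "-no_init")                      # applies to the current step/index
        self.add("option", "-exit", step="syn", index="0")  # explicit node override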
+ ###############################################################
1504
+ # Task methods
1044
1505
  ###############################################################
1045
1506
  def parse_version(self, stdout):
1507
+ """
1508
+ Parses the tool's version from its stdout. Must be implemented by subclasses.
1509
+ """
1046
1510
  raise NotImplementedError("must be implemented by the implementation class")
1047
1511
 
1048
1512
  def normalize_version(self, version):
1513
+ """
1514
+ Normalizes a version string to a standard format. Can be overridden.
1515
+ """
1049
1516
  return version
1050
1517
 
1051
1518
  def setup(self):
1519
+ """
1520
+ A hook for setting up the task before execution. Can be overridden.
1521
+ """
1052
1522
  pass
1053
1523
 
1054
1524
  def select_input_nodes(self):
1525
+ """
1526
+ Determines which preceding nodes are inputs to this task.
1527
+ """
1055
1528
  return self.schema("runtimeflow").get_node_inputs(
1056
1529
  self.__step, self.__index, record=self.schema("record"))
1057
1530
 
1058
1531
  def pre_process(self):
1532
+ """
1533
+ A hook for pre-processing before the main tool execution. Can be overridden.
1534
+ """
1059
1535
  pass
1060
1536
 
1061
1537
  def runtime_options(self):
1538
+ """
1539
+ Constructs the default runtime options for the task. Can be extended.
1540
+ """
1062
1541
  cmdargs = []
1063
1542
  cmdargs.extend(self.get("option"))
1064
-
1065
- # Add scripts files / TODO:
1066
- scripts = self.__chip.find_files('tool', self.tool(), 'task', self.task(), 'script',
1067
- step=self.__step, index=self.__index)
1068
-
1069
- cmdargs.extend(scripts)
1070
-
1543
+ script = self.find_files('script', missing_ok=True)
1544
+ if script:
1545
+ cmdargs.extend(script)
1071
1546
  return cmdargs
1072
1547
 
1073
1548
  def run(self):
1549
+ """
1550
+ The main execution logic for Python-based tasks. Must be implemented.
1551
+ """
1074
1552
  raise NotImplementedError("must be implemented by the implementation class")
1075
1553
 
1076
1554
  def post_process(self):
1555
+ """
1556
+ A hook for post-processing after the main tool execution. Can be overridden.
1557
+ """
1077
1558
  pass
1078
1559
 
1079
1560
 
1561
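The hooks above define the task lifecycle: version handling, input selection, pre-processing, command construction, execution, and post-processing. A minimal task might look like the following sketch (tool name, argument, and metric value are placeholders):

class EchoTask(TaskSchema):                    # hypothetical, for illustration only
    def tool(self):
        return "echo"

    def task(self):
        return "echo"

    def parse_version(self, stdout):
        # e.g. "echo (GNU coreutils) 9.4" -> "9.4"
        return stdout.strip().split()[-1]

    def runtime_options(self):
        cmdargs = super().runtime_options()
        cmdargs.append("hello")                # placeholder command-line argument
        return cmdargs

    def post_process(self):
        super().post_process()
        self.record_metric("warnings", 0, quiet=True)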
+ class ShowTaskSchema(TaskSchema):
1562
+ """
1563
+ A specialized TaskSchema for tasks that display files (e.g., in a GUI viewer).
1564
+
1565
+ This class provides a framework for dynamically finding and configuring
1566
+ viewer applications based on file types. It includes parameters for
1567
+ specifying the file to show and controlling the viewer's behavior.
1568
+ Subclasses should implement `get_supported_show_extentions` to declare
1569
+ which file types they can handle.
1570
+ """
1571
+ __TASKS_LOCK = threading.Lock()
1572
+ __TASKS = {}
1573
+
1574
+ def __init__(self):
1575
+ """Initializes a ShowTaskSchema, adding specific parameters for show tasks."""
1576
+ super().__init__()
1577
+ self.add_parameter("showfilepath", "file", "path to show")
1578
+ self.add_parameter("showfiletype", "str", "filetype to show")
1579
+ self.add_parameter("shownode", "(str,str,str)",
1580
+ "source node information, not always available")
1581
+ self.add_parameter("showexit", "bool", "exit after opening", defvalue=False)
1582
+
1583
+ @classmethod
1584
+ def __check_task(cls, task):
1585
+ """
1586
+ Private helper to validate that a task is a valid ShowTaskSchema or ScreenshotTaskSchema subclass.
1587
+ """
1588
+ if cls is not ShowTaskSchema and cls is not ScreenshotTaskSchema:
1589
+ raise TypeError("class must be ShowTaskSchema or ScreenshotTaskSchema")
1590
+
1591
+ if task is None:
1592
+ return
1593
+
1594
+ if cls is ShowTaskSchema:
1595
+ check, task_filter = ShowTaskSchema, ScreenshotTaskSchema
1596
+ else:
1597
+ check, task_filter = ScreenshotTaskSchema, None
1598
+
1599
+ if not issubclass(task, check):
1600
+ return False
1601
+ if task_filter and issubclass(task, task_filter):
1602
+ return False
1603
+
1604
+ return True
1605
+
1606
+ @classmethod
1607
+ def register_task(cls, task):
1608
+ """
1609
+ Registers a new show task class for dynamic discovery.
1610
+
1611
+ Args:
1612
+ task: The show task class to register.
1613
+
1614
+ Raises:
1615
+ TypeError: If the task is not a valid subclass.
1616
+ """
1617
+ if not cls.__check_task(task):
1618
+ raise TypeError(f"task must be a subclass of {cls.__name__}")
1619
+
1620
+ with cls.__TASKS_LOCK:
1621
+ cls.__TASKS.setdefault(cls, set()).add(task)
1622
+
1623
+ @classmethod
1624
+ def __populate_tasks(cls):
1625
+ """
1626
+ Private helper to discover and populate all available show/screenshot tasks.
1627
+
1628
+ This method recursively finds all subclasses and also loads tasks from
1629
+ any installed plugins.
1630
+ """
1631
+ cls.__check_task(None)
1632
+
1633
+ def recurse(searchcls):
1634
+ subclss = set()
1635
+ if not cls.__check_task(searchcls):
1636
+ return subclss
1637
+
1638
+ subclss.add(searchcls)
1639
+ for subcls in searchcls.__subclasses__():
1640
+ subclss.update(recurse(subcls))
1641
+
1642
+ return subclss
1643
+
1644
+ classes = recurse(cls)
1645
+ # Support non-SC defined tasks from plugins
1646
+ for plugin in utils.get_plugins('showtask'): # TODO rename
1647
+ plugin()
1648
+
1649
+ if not classes:
1650
+ return
1651
+
1652
+ with ShowTaskSchema.__TASKS_LOCK:
1653
+ ShowTaskSchema.__TASKS.setdefault(cls, set()).update(classes)
1654
+
1655
+ @classmethod
1656
+ def get_task(cls, ext):
1657
+ """
1658
+ Retrieves a suitable show task instance for a given file extension.
1659
+
1660
+ Args:
1661
+ ext (str): The file extension to find a viewer for.
1662
+
1663
+ Returns:
1664
+ An instance of a compatible ShowTaskSchema subclass, or None if
1665
+ no suitable task is found. If ext is None, the set of registered task classes is returned.
1666
+ """
1667
+ cls.__check_task(None)
1668
+
1669
+ if cls not in ShowTaskSchema.__TASKS:
1670
+ cls.__populate_tasks()
1671
+
1672
+ with ShowTaskSchema.__TASKS_LOCK:
1673
+ if cls not in ShowTaskSchema.__TASKS:
1674
+ return None
1675
+ tasks = ShowTaskSchema.__TASKS[cls].copy()
1676
+
1677
+ # TODO: add user preference lookup (ext -> task)
1678
+
1679
+ if ext is None:
1680
+ return tasks
1681
+
1682
+ for task in tasks:
1683
+ try:
1684
+ if ext in task().get_supported_show_extentions():
1685
+ return task()
1686
+ except NotImplementedError:
1687
+ pass
1688
+
1689
+ return None
1690
+
1691
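Discovery works by registering ShowTaskSchema subclasses and then looking them up by file extension. A sketch of both sides (the viewer class and its extensions are hypothetical):

class MyViewer(ShowTaskSchema):                # hypothetical viewer task, for illustration only
    def get_supported_show_extentions(self):
        return ["def", "gds"]

ShowTaskSchema.register_task(MyViewer)

viewer = ShowTaskSchema.get_task("gds")        # returns a MyViewer() instance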
+ def task(self):
1692
+ """Returns the name of this task."""
1693
+ return "show"
1694
+
1695
+ def setup(self):
1696
+ """Sets up the parameters and requirements for the show task."""
1697
+ super().setup()
1698
+
1699
+ self._set_filetype()
1700
+
1701
+ self.add_required_tool_key("var", "showexit")
1702
+
1703
+ if self.get("var", "shownode"):
1704
+ self.add_required_tool_key("var", "shownode")
1705
+
1706
+ if self.get("var", "showfilepath"):
1707
+ self.add_required_tool_key("var", "showfilepath")
1708
+ elif self.get("var", "showfiletype"):
1709
+ self.add_required_tool_key("var", "showfiletype")
1710
+ else:
1711
+ raise ValueError("no file information provided to show")
1712
+
1713
+ def get_supported_show_extentions(self) -> List[str]:
1714
+ """
1715
+ Returns a list of file extensions supported by this show task.
1716
+ This method must be implemented by subclasses.
1717
+ """
1718
+ raise NotImplementedError(
1719
+ "get_supported_show_extentions must be implemented by the child class")
1720
+
1721
+ def _set_filetype(self):
1722
+ """
1723
+ Private helper to determine and set the 'showfiletype' parameter based
1724
+ on the provided 'showfilepath' or available input files.
1725
+ """
1726
+ def set_file(file, ext):
1727
+ if file.lower().endswith(".gz"):
1728
+ self.set("var", "showfiletype", f"{ext}.gz")
1729
+ else:
1730
+ self.set("var", "showfiletype", ext)
1731
+
1732
+ if not self.get("var", "showfilepath"):
1733
+ exts = self.preferred_show_extensions()
1734
+
1735
+ if not self.get("var", "showfiletype"):
1736
+ input_files = {utils.get_file_ext(f): f.lower()
1737
+ for f in self.get_files_from_input_nodes().keys()}
1738
+ for ext in exts:
1739
+ if ext in input_files:
1740
+ set_file(input_files[ext], ext)
1741
+ break
1742
+ self.set("var", "showfiletype", exts[-1], clobber=False)
1743
+ else:
1744
+ file = self.get("var", "showfilepath")
1745
+ ext = utils.get_file_ext(file)
1746
+ set_file(file, ext)
1747
+
1748
+ def set_showfilepath(self, path: str, step: str = None, index: str = None):
1749
+ """Sets the path to the file to be displayed."""
1750
+ return self.set("var", "showfilepath", path, step=step, index=index)
1751
+
1752
+ def set_showfiletype(self, file_type: str, step: str = None, index: str = None):
1753
+ """Sets the type of the file to be displayed."""
1754
+ return self.set("var", "showfiletype", file_type, step=step, index=index)
1755
+
1756
+ def set_showexit(self, value: bool, step: str = None, index: str = None):
1757
+ """Sets whether the viewer application should exit after opening the file."""
1758
+ return self.set("var", "showexit", value, step=step, index=index)
1759
+
1760
+ def set_shownode(self, jobname: str = None, nodestep: str = None, nodeindex: str = None,
1761
+ step: str = None, index: str = None):
1762
+ """Sets the source node information for the file being displayed."""
1763
+ return self.set("var", "shownode", (jobname, nodestep, nodeindex), step=step, index=index)
1764
+
1765
+ def get_tcl_variables(self, manifest=None):
1766
+ """
1767
+ Gets Tcl variables for the task, ensuring 'sc_do_screenshot' is false
1768
+ for regular show tasks.
1769
+ """
1770
+ vars = super().get_tcl_variables(manifest)
1771
+ vars["sc_do_screenshot"] = "false"
1772
+ return vars
1773
+
1774
+
1775
+ class ScreenshotTaskSchema(ShowTaskSchema):
1776
+ """
1777
+ A specialized TaskSchema for tasks that generate screenshots of files.
1778
+
1779
+ This class inherits from `ShowTaskSchema` and is specifically for tasks
1780
+ that need to open a file, generate an image, and then exit. It automatically
1781
+ sets the 'showexit' parameter to True.
1782
+ """
1783
+
1784
+ def task(self):
1785
+ """Returns the name of this task."""
1786
+ return "screenshot"
1787
+
1788
+ def setup(self):
1789
+ """
1790
+ Sets up the screenshot task, ensuring that the viewer will exit
1791
+ after the screenshot is taken.
1792
+ """
1793
+ super().setup()
1794
+ # Ensure the viewer exits after taking the screenshot
1795
+ self.set_showexit(True)
1796
+
1797
+ def get_tcl_variables(self, manifest=None):
1798
+ """
1799
+ Gets Tcl variables for the task, setting 'sc_do_screenshot' to true.
1800
+ """
1801
+ vars = super().get_tcl_variables(manifest)
1802
+ vars["sc_do_screenshot"] = "true"
1803
+ return vars
1804
+
1805
+
1806
+ class ASICTaskSchema(TaskSchema):
1807
+ """
1808
+ A TaskSchema with helper methods for tasks in a standard ASIC flow,
1809
+ providing easy access to PDK and standard cell library information.
1810
+ """
1811
+ @property
1812
+ def mainlib(self):
1813
+ """The main standard cell library schema object."""
1814
+ mainlib = self.schema().get("asic", "mainlib")
1815
+ if not mainlib:
1816
+ raise ValueError("mainlib has not been defined in [asic,mainlib]")
1817
+ if mainlib not in self.schema().getkeys("library"):
1818
+ raise LookupError(f"{mainlib} has not been loaded")
1819
+ return self.schema().get("library", mainlib, field="schema")
1820
+
1821
+ @property
1822
+ def pdk(self):
1823
+ """The Process Design Kit (PDK) schema object."""
1824
+ pdk = self.mainlib.get("asic", "pdk")
1825
+ if not pdk:
1826
+ raise ValueError("pdk has not been defined in "
1827
+ f"[{','.join([*self.mainlib._keypath, 'asic', 'pdk'])}]")
1828
+ if pdk not in self.schema().getkeys("library"):
1829
+ raise LookupError(f"{pdk} has not been loaded")
1830
+ return self.schema().get("library", pdk, field="schema")
1831
+
1832
+ def set_asic_var(self,
1833
+ key: str,
1834
+ defvalue=None,
1835
+ check_pdk: bool = True,
1836
+ require_pdk: bool = False,
1837
+ pdk_key: str = None,
1838
+ check_mainlib: bool = True,
1839
+ require_mainlib: bool = False,
1840
+ mainlib_key: str = None,
1841
+ require: bool = False):
1842
+ '''
1843
+ Set an ASIC parameter based on a prioritized lookup order.
1844
+
1845
+ This method attempts to set a parameter identified by `key` by checking
1846
+ values in a specific order:
1847
+ 1. The main library
1848
+ 2. The PDK
1849
+ 3. A provided default value (`defvalue`)
1850
+
1851
+ The first non-empty or non-None value found in this hierarchy will be
1852
+ used to set the parameter. If no value is found and `defvalue` is not
1853
+ provided, the parameter will not be set unless explicitly required.
1854
+
1855
+ Args:
1856
+ key: The string key for the parameter to be set. This key is used
1857
+ to identify the parameter within the current object (`self`)
1858
+ and, by default, within the main library and PDK.
1859
+ defvalue: An optional default value to use if the parameter is not
1860
+ found in the main library or PDK. If `None` and the parameter
1861
+ is not found, it will not be set unless `require` is True.
1862
+ check_pdk: If `True`, the method will attempt to retrieve the
1863
+ parameter from the PDK. Defaults to `True`.
1864
+ require_pdk: If `True`, the parameter *must* be defined in the PDK.
1865
+ An error will be raised if it's not found and `check_pdk` is `True`.
1866
+ Defaults to `False`.
1867
+ pdk_key: The specific key to use when looking up the parameter in the
1868
+ PDK. If `None`, `key` will be used.
1869
+ check_mainlib: If `True`, the method will attempt to retrieve the
1870
+ parameter from the main library. Defaults to `True`.
1871
+ require_mainlib: If `True`, the parameter *must* be defined in the
1872
+ main library. An error will be raised if it's not found and
1873
+ `check_mainlib` is `True`. Defaults to `False`.
1874
+ mainlib_key: The specific key to use when looking up the parameter in
1875
+ the main library. If `None`, `key` will be used.
1876
+ require: If `True`, the parameter *must* be set by this method (either
1877
+ from a source or `defvalue`). An error will be raised if it cannot
1878
+ be set. Defaults to `False`.
1879
+ '''
1880
+ check_keys = []
1881
+ if check_pdk:
1882
+ if not pdk_key:
1883
+ pdk_key = key
1884
+ if self.pdk.valid("tool", self.tool(), pdk_key):
1885
+ check_keys.append((self.pdk, ("tool", self.tool(), pdk_key)))
1886
+ if check_mainlib:
1887
+ if not mainlib_key:
1888
+ mainlib_key = key
1889
+ if self.mainlib.valid("tool", self.tool(), mainlib_key):
1890
+ check_keys.append((self.mainlib, ("tool", self.tool(), mainlib_key)))
1891
+ check_keys.append((self, ("var", key)))
1892
+
1893
+ if require_pdk:
1894
+ self.add_required_key(self.pdk, "tool", self.tool(), pdk_key)
1895
+ if require_mainlib:
1896
+ self.add_required_key(self.mainlib, "tool", self.tool(), mainlib_key)
1897
+ if require or defvalue is not None:
1898
+ self.add_required_key(self, "var", key)
1899
+
1900
+ if self.get("var", key, field=None).is_set(self.step, self.index):
1901
+ return
1902
+
1903
+ for obj, keypath in reversed(check_keys):
1904
+ if not obj.valid(*keypath):
1905
+ continue
1906
+
1907
+ value = obj.get(*keypath)
1908
+ if isinstance(value, (list, set, tuple)):
1909
+ if not value:
1910
+ continue
1911
+ else:
1912
+ if value is None:
1913
+ continue
1914
+ self.add_required_key(obj, *keypath)
1915
+ self.add_required_key(self, "var", key)
1916
+ return self.set("var", key, value)
1917
+ if defvalue is not None:
1918
+ return self.set("var", key, defvalue)
1919
+
1920
+
1080
1921
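A typical use of `set_asic_var()` is in an ASIC task's `setup()`, after the task parameter it should populate has been declared. A sketch with a hypothetical parameter name and default:

class MyPlaceTask(ASICTaskSchema):             # hypothetical, for illustration only
    def __init__(self):
        super().__init__()
        self.add_parameter("place_density", "float", "target placement density")

    def setup(self):
        super().setup()
        # Resolved from the mainlib, then the PDK, then the provided default.
        self.set_asic_var("place_density", defvalue=0.6)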
  class ToolSchema(NamedSchema):
1922
+ """
1923
+ A schema class that defines the parameters for a single tool, which can
1924
+ contain multiple tasks.
1925
+ """
1081
1926
  def __init__(self, name=None):
1082
1927
  super().__init__()
1083
1928
  self.set_name(name)
1084
-
1085
1929
  schema_tool(self)
1086
-
1087
1930
  schema = EditableSchema(self)
1088
- schema.insert("task", "default", TaskSchema(None))
1931
+ schema.insert("task", "default", TaskSchema())
1932
+
1933
+ @classmethod
1934
+ def _getdict_type(cls) -> str:
1935
+ """Returns the metadata for getdict."""
1936
+ return ToolSchema.__name__
1089
1937
 
1090
1938
 
1091
1939
  ###########################################################################
@@ -1100,6 +1948,14 @@ class ToolSchemaTmp(NamedSchema):
1100
1948
  schema = EditableSchema(self)
1101
1949
  schema.insert("task", "default", TaskSchemaTmp())
1102
1950
 
1951
+ @classmethod
1952
+ def _getdict_type(cls) -> str:
1953
+ """
1954
+ Returns the meta data for getdict
1955
+ """
1956
+
1957
+ return ToolSchemaTmp.__name__
1958
+
1103
1959
 
1104
1960
  class TaskSchemaTmp(TaskSchema):
1105
1961
  def __init__(self):
@@ -1113,28 +1969,26 @@ class TaskSchemaTmp(TaskSchema):
1113
1969
  return None
1114
1970
 
1115
1971
  def __tool_task_modules(self):
1116
- step, index = self.node()
1117
1972
  flow = self._TaskSchema__chip.get('option', 'flow')
1118
1973
  return \
1119
- self._TaskSchema__chip._get_tool_module(step, index, flow=flow), \
1120
- self._TaskSchema__chip._get_task_module(step, index, flow=flow)
1974
+ self._TaskSchema__chip._get_tool_module(self.step, self.index, flow=flow), \
1975
+ self._TaskSchema__chip._get_task_module(self.step, self.index, flow=flow)
1121
1976
 
1122
1977
  @contextlib.contextmanager
1123
1978
  def __in_step_index(self):
1124
1979
  prev_step, prev_index = self._TaskSchema__chip.get('arg', 'step'), \
1125
1980
  self._TaskSchema__chip.get('arg', 'index')
1126
- step, index = self.node()
1127
- self._TaskSchema__chip.set('arg', 'step', step)
1128
- self._TaskSchema__chip.set('arg', 'index', index)
1981
+ self._TaskSchema__chip.set('arg', 'step', self.step)
1982
+ self._TaskSchema__chip.set('arg', 'index', self.index)
1129
1983
  yield
1130
1984
  self._TaskSchema__chip.set('arg', 'step', prev_step)
1131
1985
  self._TaskSchema__chip.set('arg', 'index', prev_index)
1132
1986
 
1133
1987
  def tool(self):
1134
- return self.schema("flow").get(*self.node(), 'tool')
1988
+ return self.schema("flow").get(self.step, self.index, 'tool')
1135
1989
 
1136
1990
  def task(self):
1137
- return self.schema("flow").get(*self.node(), 'task')
1991
+ return self.schema("flow").get(self.step, self.index, 'task')
1138
1992
 
1139
1993
  def get_exe(self):
1140
1994
  if self.tool() == "execute" and self.task() == "exec_input":
@@ -1150,7 +2004,7 @@ class TaskSchemaTmp(TaskSchema):
1150
2004
  _, task = self.__tool_task_modules()
1151
2005
  method = self.__module_func("_gather_outputs", [task])
1152
2006
  if method:
1153
- return method(self._TaskSchema__chip, *self.node())
2007
+ return method(self._TaskSchema__chip, self.step, self.index)
1154
2008
  return TaskSchema.get_output_files(self)
1155
2009
 
1156
2010
  def parse_version(self, stdout):
@@ -1179,15 +2033,16 @@ class TaskSchemaTmp(TaskSchema):
1179
2033
  if method:
1180
2034
  with self.__in_step_index():
1181
2035
  ret = method(self._TaskSchema__chip)
1182
- return ret
1183
- return TaskSchema.setup(self)
2036
+ if ret:
2037
+ raise TaskSkip(ret)
2038
+ TaskSchema.setup(self)
1184
2039
 
1185
2040
  def select_input_nodes(self):
1186
2041
  _, task = self.__tool_task_modules()
1187
2042
  method = self.__module_func("_select_inputs", [task])
1188
2043
  if method:
1189
2044
  with self.__in_step_index():
1190
- ret = method(self._TaskSchema__chip, *self.node())
2045
+ ret = method(self._TaskSchema__chip, self.step, self.index)
1191
2046
  return ret
1192
2047
  return TaskSchema.select_input_nodes(self)
1193
2048
 
@@ -1197,8 +2052,9 @@ class TaskSchemaTmp(TaskSchema):
1197
2052
  if method:
1198
2053
  with self.__in_step_index():
1199
2054
  ret = method(self._TaskSchema__chip)
1200
- return ret
1201
- return TaskSchema.pre_process(self)
2055
+ if ret:
2056
+ raise TaskSkip(ret)
2057
+ TaskSchema.pre_process(self)
1202
2058
 
1203
2059
  def runtime_options(self):
1204
2060
  tool, task = self.__tool_task_modules()
@@ -1215,9 +2071,8 @@ class TaskSchemaTmp(TaskSchema):
1215
2071
  method = self.__module_func("run", [task])
1216
2072
  if method:
1217
2073
  # Handle logger stdout suppression if quiet
1218
- step, index = self.node()
1219
2074
  stdout_handler_level = self._TaskSchema__chip._logger_console.level
1220
- if self._TaskSchema__chip.get('option', 'quiet', step=step, index=index):
2075
+ if self._TaskSchema__chip.get('option', 'quiet', step=self.step, index=self.index):
1221
2076
  self._TaskSchema__chip._logger_console.setLevel(logging.CRITICAL)
1222
2077
 
1223
2078
  with self.__in_step_index():
@@ -1233,15 +2088,20 @@ class TaskSchemaTmp(TaskSchema):
1233
2088
  method = self.__module_func("post_process", [task])
1234
2089
  if method:
1235
2090
  with self.__in_step_index():
1236
- ret = method(self._TaskSchema__chip)
1237
- return ret
1238
- return TaskSchema.post_process(self)
2091
+ method(self._TaskSchema__chip)
2092
+ TaskSchema.post_process(self)
1239
2093
 
1240
2094
 
1241
2095
  ###########################################################################
1242
2096
  # Tool Setup
1243
2097
  ###########################################################################
1244
2098
  def schema_tool(schema):
2099
+ """
2100
+ Defines the standard parameters for a tool within the schema.
2101
+
2102
+ Args:
2103
+ schema (Schema): The schema object to add the parameters to.
2104
+ """
1245
2105
  schema = EditableSchema(schema)
1246
2106
 
1247
2107
  schema.insert(
@@ -1373,6 +2233,12 @@ def schema_tool(schema):
1373
2233
 
1374
2234
 
1375
2235
  def schema_task(schema):
2236
+ """
2237
+ Defines the standard parameters for a task within the schema.
2238
+
2239
+ Args:
2240
+ schema (Schema): The schema object to add the parameters to.
2241
+ """
1376
2242
  schema = EditableSchema(schema)
1377
2243
 
1378
2244
  schema.insert(