siliconcompiler 0.34.2__py3-none-any.whl → 0.34.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. siliconcompiler/__init__.py +12 -5
  2. siliconcompiler/__main__.py +1 -7
  3. siliconcompiler/_metadata.py +1 -1
  4. siliconcompiler/apps/_common.py +104 -23
  5. siliconcompiler/apps/sc.py +4 -8
  6. siliconcompiler/apps/sc_dashboard.py +6 -4
  7. siliconcompiler/apps/sc_install.py +10 -6
  8. siliconcompiler/apps/sc_issue.py +7 -5
  9. siliconcompiler/apps/sc_remote.py +1 -1
  10. siliconcompiler/apps/sc_server.py +9 -14
  11. siliconcompiler/apps/sc_show.py +6 -5
  12. siliconcompiler/apps/smake.py +130 -94
  13. siliconcompiler/apps/utils/replay.py +4 -7
  14. siliconcompiler/apps/utils/summarize.py +3 -5
  15. siliconcompiler/asic.py +420 -0
  16. siliconcompiler/checklist.py +25 -2
  17. siliconcompiler/cmdlineschema.py +534 -0
  18. siliconcompiler/constraints/asic_component.py +2 -2
  19. siliconcompiler/constraints/asic_pins.py +2 -2
  20. siliconcompiler/constraints/asic_timing.py +3 -3
  21. siliconcompiler/core.py +7 -32
  22. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +8 -0
  23. siliconcompiler/dependencyschema.py +89 -31
  24. siliconcompiler/design.py +176 -207
  25. siliconcompiler/filesetschema.py +250 -0
  26. siliconcompiler/flowgraph.py +274 -95
  27. siliconcompiler/fpga.py +124 -1
  28. siliconcompiler/library.py +218 -20
  29. siliconcompiler/metric.py +233 -20
  30. siliconcompiler/package/__init__.py +271 -50
  31. siliconcompiler/package/git.py +92 -16
  32. siliconcompiler/package/github.py +108 -12
  33. siliconcompiler/package/https.py +79 -16
  34. siliconcompiler/packageschema.py +88 -7
  35. siliconcompiler/pathschema.py +31 -2
  36. siliconcompiler/pdk.py +566 -1
  37. siliconcompiler/project.py +1095 -94
  38. siliconcompiler/record.py +38 -1
  39. siliconcompiler/remote/__init__.py +5 -2
  40. siliconcompiler/remote/client.py +11 -6
  41. siliconcompiler/remote/schema.py +5 -23
  42. siliconcompiler/remote/server.py +41 -54
  43. siliconcompiler/report/__init__.py +3 -3
  44. siliconcompiler/report/dashboard/__init__.py +48 -14
  45. siliconcompiler/report/dashboard/cli/__init__.py +99 -21
  46. siliconcompiler/report/dashboard/cli/board.py +364 -179
  47. siliconcompiler/report/dashboard/web/__init__.py +90 -12
  48. siliconcompiler/report/dashboard/web/components/__init__.py +219 -240
  49. siliconcompiler/report/dashboard/web/components/flowgraph.py +49 -26
  50. siliconcompiler/report/dashboard/web/components/graph.py +139 -100
  51. siliconcompiler/report/dashboard/web/layouts/__init__.py +29 -1
  52. siliconcompiler/report/dashboard/web/layouts/_common.py +38 -2
  53. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph.py +39 -26
  54. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_node_tab.py +50 -50
  55. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_sac_tabs.py +49 -46
  56. siliconcompiler/report/dashboard/web/state.py +141 -14
  57. siliconcompiler/report/dashboard/web/utils/__init__.py +79 -16
  58. siliconcompiler/report/dashboard/web/utils/file_utils.py +74 -11
  59. siliconcompiler/report/dashboard/web/viewer.py +25 -1
  60. siliconcompiler/report/report.py +5 -2
  61. siliconcompiler/report/summary_image.py +29 -11
  62. siliconcompiler/scheduler/__init__.py +9 -1
  63. siliconcompiler/scheduler/docker.py +79 -1
  64. siliconcompiler/scheduler/run_node.py +35 -19
  65. siliconcompiler/scheduler/scheduler.py +208 -24
  66. siliconcompiler/scheduler/schedulernode.py +372 -46
  67. siliconcompiler/scheduler/send_messages.py +77 -29
  68. siliconcompiler/scheduler/slurm.py +76 -12
  69. siliconcompiler/scheduler/taskscheduler.py +140 -20
  70. siliconcompiler/schema/__init__.py +0 -2
  71. siliconcompiler/schema/baseschema.py +194 -38
  72. siliconcompiler/schema/journal.py +7 -4
  73. siliconcompiler/schema/namedschema.py +16 -10
  74. siliconcompiler/schema/parameter.py +55 -9
  75. siliconcompiler/schema/parametervalue.py +60 -0
  76. siliconcompiler/schema/safeschema.py +25 -2
  77. siliconcompiler/schema/schema_cfg.py +5 -5
  78. siliconcompiler/schema/utils.py +2 -2
  79. siliconcompiler/schema_obj.py +20 -3
  80. siliconcompiler/tool.py +979 -302
  81. siliconcompiler/tools/bambu/__init__.py +41 -0
  82. siliconcompiler/tools/builtin/concatenate.py +2 -2
  83. siliconcompiler/tools/builtin/minimum.py +2 -1
  84. siliconcompiler/tools/builtin/mux.py +2 -1
  85. siliconcompiler/tools/builtin/nop.py +2 -1
  86. siliconcompiler/tools/builtin/verify.py +2 -1
  87. siliconcompiler/tools/klayout/__init__.py +95 -0
  88. siliconcompiler/tools/openroad/__init__.py +289 -0
  89. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +3 -0
  90. siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +7 -2
  91. siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +8 -4
  92. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +9 -5
  93. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +5 -1
  94. siliconcompiler/tools/slang/__init__.py +1 -1
  95. siliconcompiler/tools/slang/elaborate.py +2 -1
  96. siliconcompiler/tools/vivado/scripts/sc_run.tcl +1 -1
  97. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +8 -1
  98. siliconcompiler/tools/vivado/syn_fpga.py +6 -0
  99. siliconcompiler/tools/vivado/vivado.py +35 -2
  100. siliconcompiler/tools/vpr/__init__.py +150 -0
  101. siliconcompiler/tools/yosys/__init__.py +369 -1
  102. siliconcompiler/tools/yosys/scripts/procs.tcl +0 -1
  103. siliconcompiler/toolscripts/_tools.json +5 -10
  104. siliconcompiler/utils/__init__.py +66 -0
  105. siliconcompiler/utils/flowgraph.py +2 -2
  106. siliconcompiler/utils/issue.py +2 -1
  107. siliconcompiler/utils/logging.py +14 -0
  108. siliconcompiler/utils/multiprocessing.py +256 -0
  109. siliconcompiler/utils/showtools.py +10 -0
  110. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/METADATA +5 -5
  111. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/RECORD +115 -118
  112. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/entry_points.txt +3 -0
  113. siliconcompiler/schema/cmdlineschema.py +0 -250
  114. siliconcompiler/toolscripts/rhel8/install-slang.sh +0 -40
  115. siliconcompiler/toolscripts/rhel9/install-slang.sh +0 -40
  116. siliconcompiler/toolscripts/ubuntu20/install-slang.sh +0 -47
  117. siliconcompiler/toolscripts/ubuntu22/install-slang.sh +0 -37
  118. siliconcompiler/toolscripts/ubuntu24/install-slang.sh +0 -37
  119. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/WHEEL +0 -0
  120. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/licenses/LICENSE +0 -0
  121. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/top_level.txt +0 -0
siliconcompiler/tool.py CHANGED
@@ -10,16 +10,18 @@ import shlex
  import shutil
  import subprocess
  import sys
+ import threading
  import time
  import yaml

  try:
+ # 'resource' is not available on Windows, so we handle its absence gracefully.
  import resource
  except ModuleNotFoundError:
  resource = None

  try:
- # Note: this import throws exception on Windows
+ # 'pty' is not available on Windows.
  import pty
  except ModuleNotFoundError:
  pty = None
@@ -29,7 +31,7 @@ import os.path
  from packaging.version import Version, InvalidVersion
  from packaging.specifiers import SpecifierSet, InvalidSpecifier

- from typing import List, Union
+ from typing import List, Dict, Tuple, Union

  from siliconcompiler.schema import BaseSchema, NamedSchema, Journal
  from siliconcompiler.schema import EditableSchema, Parameter, PerNode, Scope
@@ -41,34 +43,60 @@ from siliconcompiler import sc_open
  from siliconcompiler import Schema

  from siliconcompiler.record import RecordTool
+ from siliconcompiler.scheduler import SchedulerNode
  from siliconcompiler.flowgraph import RuntimeFlowgraph


  class TaskError(Exception):
- '''
- Error indicates execution cannot continue and should be terminated
- '''
+ '''Error indicating that task execution cannot continue and should be terminated.'''
+ pass


  class TaskTimeout(TaskError):
- '''
- Error indicates a timeout has occurred
+ '''Error indicating a timeout has occurred during task execution.

  Args:
- timeout (float): execution time at timeout
+ timeout (float): The execution time in seconds at which the timeout occurred.
  '''
+
  def __init__(self, *args, timeout=None, **kwargs):
  super().__init__(*args, **kwargs)
  self.timeout = timeout


  class TaskExecutableNotFound(TaskError):
- '''
- Executable not found.
- '''
+ '''Error indicating that the required tool executable could not be found.'''
+ pass
+
+
+ class TaskSkip(TaskError):
+ """
+ Error raised to indicate that the current task should be skipped.
+
+ This exception is only intended to be used within the `setup()` and
+ `pre_process()` methods of a Task.
+ """
+
+ def __init__(self, why: str, *args):
+ super().__init__(why, *args)
+ self.__why = why
+
+ @property
+ def why(self):
+ """str: The reason why the task is being skipped."""
+ return self.__why


  class TaskSchema(NamedSchema):
+ """
+ A schema class that defines the parameters and methods for a single task
+ in a compilation flow.
+
+ This class provides the framework for setting up, running, and post-processing
+ a tool. It includes methods for managing executables, versions, runtime
+ arguments, and file I/O.
+ """
+ # Regex for parsing version check strings like ">=1.2.3"
  __parse_version_check_str = r"""
  (?P<operator>(==|!=|<=|>=|<|>|~=))
  \s*
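The new TaskSkip exception added above gives task drivers a way to opt a node out of execution from setup() or pre_process() instead of failing it. A minimal sketch of that pattern, using a hypothetical precondition (has_work) and a stand-in object that are not part of this diff:

    from siliconcompiler.tool import TaskSkip

    def setup(task):
        # Illustrative setup() body for a task driver: bail out of this node
        # with a human-readable reason instead of raising a hard error.
        if not getattr(task, "has_work", True):
            raise TaskSkip("nothing to do for this node")

    class _StubTask:
        has_work = False

    try:
        setup(_StubTask())
    except TaskSkip as err:
        print(f"node skipped: {err.why}")  # -> node skipped: nothing to do for this node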
@@ -84,35 +112,71 @@ class TaskSchema(NamedSchema):
  r"^\s*" + __parse_version_check_str + r"\s*$",
  re.VERBOSE | re.IGNORECASE)

- def __init__(self, name=None):
+ def __init__(self):
  super().__init__()
- self.set_name(name)

  schema_task(self)

  self.__set_runtime(None)

+ @classmethod
+ def _getdict_type(cls) -> str:
+ """Returns the metadata for getdict."""
+ return TaskSchema.__name__
+
+ def _from_dict(self, manifest, keypath, version=None):
+ """
+ Populates the schema from a dictionary, dynamically adding 'var'
+ parameters found in the manifest that are not already defined.
+ """
+ if "var" in manifest:
+ # Collect existing and manifest var keys
+ var_keys = [k[0] for k in self.allkeys("var")]
+ manifest_keys = set(manifest["var"].keys())
+
+ # Add new vars found in the manifest to the schema
+ edit = EditableSchema(self)
+ for var in sorted(manifest_keys.difference(var_keys)):
+ edit.insert("var", var,
+ Parameter.from_dict(
+ manifest["var"][var],
+ keypath=keypath + [var],
+ version=version))
+ del manifest["var"][var]
+
+ if not manifest["var"]:
+ del manifest["var"]
+
+ return super()._from_dict(manifest, keypath, version)
+
  @contextlib.contextmanager
- def runtime(self, chip, step=None, index=None, relpath=None):
- '''
- Sets the runtime information needed to properly execute a task.
- Note: unstable API
+ def runtime(self, node, step=None, index=None, relpath=None):
+ """
+ A context manager to set the runtime information for a task.
+
+ This method creates a temporary copy of the task object with runtime
+ information (like the current step, index, and working directories)
+ populated from a SchedulerNode. This allows methods within the context
+ to access runtime-specific configuration and paths.

  Args:
- chip (:class:`Chip`): root schema for the runtime information
- '''
+ node (SchedulerNode): The scheduler node for this runtime context.
+ """
+ if node and not isinstance(node, SchedulerNode):
+ raise TypeError("node must be a scheduler node")
+
  obj_copy = copy.copy(self)
- obj_copy.__set_runtime(chip, step=step, index=index, relpath=relpath)
+ obj_copy.__set_runtime(node, step=step, index=index, relpath=relpath)
  yield obj_copy

- def __set_runtime(self, chip, step=None, index=None, relpath=None):
- '''
- Sets the runtime information needed to properly execute a task.
- Note: unstable API
+ def __set_runtime(self, node: SchedulerNode, step=None, index=None, relpath=None):
+ """
+ Private helper to set the runtime information for executing a task.

  Args:
- chip (:class:`Chip`): root schema for the runtime information
- '''
+ node (SchedulerNode): The scheduler node for this runtime.
+ """
+ self.__node = node
  self.__chip = None
  self.__schema_full = None
  self.__logger = None
@@ -122,18 +186,26 @@ class TaskSchema(NamedSchema):
  self.__cwd = None
  self.__relpath = relpath
  self.__collection_path = None
- if chip:
- self.__chip = chip
- self.__schema_full = chip.schema
- self.__logger = chip.logger
- self.__design_name = chip.design
- self.__design_top = chip.top(step=step, index=index)
- self.__design_top_global = chip.top()
- self.__cwd = chip.cwd
- self.__collection_path = chip._getcollectdir()
-
- self.__step = step
- self.__index = index
+ self.__jobdir = None
+ if node:
+ if step is not None or index is not None:
+ raise RuntimeError("step and index cannot be provided with node")
+
+ self.__chip = node.chip
+ self.__schema_full = node.chip.schema
+ self.__logger = node.chip.logger
+ self.__design_name = node.name
+ self.__design_top = node.topmodule
+ self.__design_top_global = node.topmodule_global
+ self.__cwd = node.project_cwd
+ self.__collection_path = node.collection_dir
+ self.__jobdir = node.workdir
+
+ self.__step = node.step
+ self.__index = node.index
+ else:
+ self.__step = step
+ self.__index = index

  self.__schema_record = None
  self.__schema_metric = None
@@ -163,61 +235,63 @@ class TaskSchema(NamedSchema):
  from_steps=set([step for step, _ in self.__schema_flow.get_entry_nodes()]),
  prune_nodes=self.__schema_full.get('option', 'prune'))

+ @property
  def design_name(self) -> str:
- '''
- Returns:
- name of the design
- '''
+ """str: The name of the design."""
  return self.__design_name

+ @property
  def design_topmodule(self) -> str:
- '''
- Returns:
- top module of the design
- '''
+ """str: The top module of the design for the current node."""
  return self.__design_top

- def node(self):
- '''
- Returns:
- step and index for the current runtime
- '''
+ @property
+ def node(self) -> SchedulerNode:
+ """SchedulerNode: The scheduler node for the current runtime."""
+ return self.__node

- return self.__step, self.__index
+ @property
+ def step(self) -> str:
+ """str: The step for the current runtime."""
+ return self.__step

- def tool(self):
- '''
- Returns:
- tool name
- '''
+ @property
+ def index(self) -> str:
+ """str: The index for the current runtime."""
+ return self.__index

+ def tool(self) -> str:
+ """str: The name of the tool associated with this task."""
  raise NotImplementedError("tool name must be implemented by the child class")

- def task(self):
- '''
- Returns:
- task name
- '''
-
+ def task(self) -> str:
+ """str: The name of this task."""
+ if self.name:
+ return self.name
  raise NotImplementedError("task name must be implemented by the child class")

- def logger(self):
- '''
- Returns:
- logger
- '''
+ @property
+ def logger(self) -> logging.Logger:
+ """logging.Logger: The logger instance."""
  return self.__logger

+ @property
+ def nodeworkdir(self) -> str:
+ """str: The path to the node's working directory."""
+ return self.__jobdir
+
  def schema(self, type=None):
- '''
- Get useful section of the schema.
+ """
+ Gets a specific section of the schema.

  Args:
- type (str): schema section to find, if None returns the root schema.
+ type (str, optional): The schema section to retrieve. If None,
+ returns the root schema. Valid types include "record",
+ "metric", "flow", "runtimeflow", and "tool".

  Returns:
- schema section.
- '''
+ The requested schema section object.
+ """
  if type is None:
  return self.__schema_full
  elif type == "record":
@@ -233,30 +307,44 @@ class TaskSchema(NamedSchema):
  else:
  raise ValueError(f"{type} is not a schema section")

+ def get_logpath(self, log: str) -> str:
+ """
+ Returns the relative path to a specified log file.
+
+ Args:
+ log (str): The type of log file (e.g., 'exe', 'sc').
+
+ Returns:
+ str: The relative path to the log file from the node's workdir.
+ """
+ return os.path.relpath(self.__node.get_log(log), self.__jobdir)
+
  def has_breakpoint(self) -> bool:
- '''
+ """
+ Checks if a breakpoint is set for this task.
+
  Returns:
- True if this task has a breakpoint associated with it
- '''
+ bool: True if a breakpoint is active, False otherwise.
+ """
  return self.schema().get("option", "breakpoint", step=self.__step, index=self.__index)

  def get_exe(self) -> str:
- '''
- Determines the absolute path for the specified executable.
+ """
+ Determines the absolute path for the task's executable.

  Raises:
- :class:`TaskExecutableNotFound`: if executable not found.
+ TaskExecutableNotFound: If the executable cannot be found in the system PATH.

  Returns:
- path to executable, or None if not specified
- '''
+ str: The absolute path to the executable, or None if not specified.
+ """

  exe = self.schema("tool").get('exe')

  if exe is None:
  return None

- # Collect path
+ # Collect PATH from environment variables
  env = self.get_runtime_environmental_variables(include_path=True)

  fullexe = shutil.which(exe, path=env["PATH"])
@@ -267,16 +355,16 @@ class TaskSchema(NamedSchema):
  return fullexe

  def get_exe_version(self) -> str:
- '''
- Gets the version of the specified executable.
+ """
+ Gets the version of the task's executable by running it with a version switch.

  Raises:
- :class:`TaskExecutableNotFound`: if executable not found.
- :class:`NotImplementedError`: if :meth:`.parse_version` has not be implemented.
+ TaskExecutableNotFound: If the executable is not found.
+ NotImplementedError: If the `parse_version` method is not implemented.

  Returns:
- version determined by :meth:`.parse_version`.
- '''
+ str: The parsed version string.
+ """

  veropt = self.schema("tool").get('vswitch')
  if not veropt:
@@ -320,21 +408,20 @@ class TaskSchema(NamedSchema):
  return version

  def check_exe_version(self, reported_version) -> bool:
- '''
- Check if the reported version matches the versions specified in
- :keypath:`tool,<tool>,version`.
+ """
+ Checks if the reported version of a tool satisfies the requirements
+ specified in the schema.

  Args:
- reported_version (str): version to check
+ reported_version (str): The version string reported by the tool.

  Returns:
- True if the version matched, false otherwise
-
- '''
+ bool: True if the version is acceptable, False otherwise.
+ """

  spec_sets = self.schema("tool").get('version', step=self.__step, index=self.__index)
  if not spec_sets:
- # No requirement so always true
+ # No requirement, so always true
  return True

  for spec_set in spec_sets:
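check_exe_version() above compares a tool's reported version against specifier strings using the packaging library imported at the top of this module. A standalone sketch of the same kind of comparison, with made-up version values that are not taken from the diff:

    from packaging.version import Version
    from packaging.specifiers import SpecifierSet

    reported = Version("2.1.0")             # hypothetical value a tool might report
    requirement = SpecifierSet(">=2.0,<3")  # hypothetical version requirement string

    # Containment in a SpecifierSet is the style of check performed here.
    print(reported in requirement)  # True: 2.1.0 satisfies >=2.0,<3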
@@ -394,22 +481,22 @@ class TaskSchema(NamedSchema):
  return False

  def get_runtime_environmental_variables(self, include_path=True):
- '''
- Determine the environmental variables needed for the task
+ """
+ Determines the environment variables needed for the task.

  Args:
- include_path (bool): if True, includes PATH variable
+ include_path (bool): If True, includes the PATH variable.

  Returns:
- dict of str: dictionary of environmental variable to value mapping
- '''
+ dict: A dictionary of environment variable names to their values.
+ """

  # Add global environmental vars
  envvars = {}
  for env in self.__schema_full.getkeys('option', 'env'):
  envvars[env] = self.__schema_full.get('option', 'env', env)

- # Add tool specific vars
+ # Add tool-specific license server vars
  for lic_env in self.schema("tool").getkeys('licenseserver'):
  license_file = self.schema("tool").get('licenseserver', lic_env,
  step=self.__step, index=self.__index)
@@ -428,25 +515,25 @@ class TaskSchema(NamedSchema):
  if path:
  envvars["PATH"] = path + os.pathsep + envvars["PATH"]

- # Forward additional variables
+ # Forward additional variables like LD_LIBRARY_PATH
  for var in ('LD_LIBRARY_PATH',):
  val = os.getenv(var, None)
  if val:
  envvars[var] = val

- # Add task specific vars
+ # Add task-specific vars
  for env in self.getkeys("env"):
  envvars[env] = self.get("env", env)

  return envvars

  def get_runtime_arguments(self):
- '''
- Constructs the arguments needed to run the task.
+ """
+ Constructs the command-line arguments needed to run the task.

  Returns:
- command (list)
- '''
+ list: A list of command-line arguments.
+ """

  cmdargs = []
  try:
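get_runtime_environmental_variables() and get_runtime_arguments() feed the subprocess launch performed later in run_task(). A rough, self-contained sketch of how an environment mapping and argument list of that shape are typically combined with subprocess; the variable names and values below are placeholders, not values from this diff:

    import os
    import subprocess

    # Shapes mirror what the two helpers return: a name->value mapping and an argv list.
    envvars = {"PATH": os.environ.get("PATH", ""), "LM_LICENSE_FILE": "1234@license-server"}
    cmdargs = ["-batch", "-source", "run.tcl"]

    # Merge the task environment over the current one before launching the tool.
    env = dict(os.environ)
    env.update(envvars)

    # 'echo' stands in for the tool executable so the sketch is runnable anywhere.
    proc = subprocess.run(["echo", *cmdargs], env=env, capture_output=True, text=True)
    print(proc.stdout.strip())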
@@ -472,14 +559,14 @@ class TaskSchema(NamedSchema):
472
559
  return cmdargs
473
560
 
474
561
  def generate_replay_script(self, filepath, workdir, include_path=True):
475
- '''
476
- Generate a replay script for the task.
562
+ """
563
+ Generates a shell script to replay the task's execution.
477
564
 
478
565
  Args:
479
- filepath (path): path to the file to write
480
- workdir (path): path to the run work directory
481
- include_path (bool): include path information in environmental variables
482
- '''
566
+ filepath (str): The path to write the replay script to.
567
+ workdir (str): The path to the run's working directory.
568
+ include_path (bool): If True, includes PATH information.
569
+ """
483
570
  replay_opts = {}
484
571
  replay_opts["work_dir"] = workdir
485
572
  replay_opts["exports"] = self.get_runtime_environmental_variables(include_path=include_path)
@@ -494,10 +581,8 @@ class TaskSchema(NamedSchema):
494
581
  if vswitch:
495
582
  replay_opts["version_flag"] = shlex.join(vswitch)
496
583
 
497
- # detect arguments
584
+ # Regex to detect arguments and file paths for formatting
498
585
  arg_test = re.compile(r'^[-+]')
499
-
500
- # detect file paths
501
586
  file_test = re.compile(r'^[/\.]')
502
587
 
503
588
  if replay_opts["executable"]:
@@ -521,7 +606,7 @@ class TaskSchema(NamedSchema):
521
606
  format_cmd = []
522
607
  replay_opts["cmds"] = format_cmd
523
608
 
524
- # create replay file
609
+ # Create replay file from template
525
610
  with open(filepath, 'w') as f:
526
611
  f.write(utils.get_file_template("replay/replay.sh.j2").render(replay_opts))
527
612
  f.write("\n")
@@ -529,25 +614,26 @@ class TaskSchema(NamedSchema):
529
614
  os.chmod(filepath, 0o755)
530
615
 
531
616
  def setup_work_directory(self, workdir, remove_exist=True):
532
- '''
533
- Create the runtime directories needed to execute a task.
617
+ """
618
+ Creates the runtime directories needed to execute a task.
534
619
 
535
620
  Args:
536
- workdir (path): path to the run work directory
537
- remove_exist (bool): if True, removes the existing directory
538
- '''
621
+ workdir (str): The path to the node's working directory.
622
+ remove_exist (bool): If True, removes the directory if it already exists.
623
+ """
539
624
 
540
- # Delete existing directory
625
+ # Delete existing directory if requested
541
626
  if os.path.isdir(workdir) and remove_exist:
542
627
  shutil.rmtree(workdir)
543
628
 
544
- # Create directories
629
+ # Create standard subdirectories
545
630
  os.makedirs(workdir, exist_ok=True)
546
631
  os.makedirs(os.path.join(workdir, 'inputs'), exist_ok=True)
547
632
  os.makedirs(os.path.join(workdir, 'outputs'), exist_ok=True)
548
633
  os.makedirs(os.path.join(workdir, 'reports'), exist_ok=True)
549
634
 
550
635
  def __write_yaml_manifest(self, fout, manifest):
636
+ """Private helper to write a manifest in YAML format."""
551
637
  class YamlIndentDumper(yaml.Dumper):
552
638
  def increase_indent(self, flow=False, indentless=False):
553
639
  return super().increase_indent(flow=flow, indentless=indentless)
@@ -555,24 +641,51 @@ class TaskSchema(NamedSchema):
555
641
  fout.write(yaml.dump(manifest.getdict(), Dumper=YamlIndentDumper,
556
642
  default_flow_style=False))
557
643
 
644
+ def get_tcl_variables(self, manifest: BaseSchema = None) -> Dict[str, str]:
645
+ """
646
+ Gets a dictionary of variables to define for the task in a Tcl manifest.
647
+
648
+ Args:
649
+ manifest (BaseSchema, optional): The manifest to retrieve values from.
650
+
651
+ Returns:
652
+ dict: A dictionary of variable names and their Tcl-formatted values.
653
+ """
654
+
655
+ if manifest is None:
656
+ manifest = self.schema()
657
+
658
+ vars = {
659
+ "sc_tool": NodeType.to_tcl(self.tool(), "str"),
660
+ "sc_task": NodeType.to_tcl(self.task(), "str"),
661
+ "sc_topmodule": NodeType.to_tcl(self.design_topmodule, "str")
662
+ }
663
+
664
+ refdir = manifest.get("tool", self.tool(), "task", self.task(), "refdir", field=None)
665
+ if refdir.get(step=self.__step, index=self.__index):
666
+ vars["sc_refdir"] = refdir.gettcl(step=self.__step, index=self.__index)
667
+
668
+ return vars
669
+
558
670
  def __write_tcl_manifest(self, fout, manifest):
671
+ """Private helper to write a manifest in Tcl format."""
559
672
  template = utils.get_file_template('tcl/manifest.tcl.j2')
560
673
  tcl_set_cmds = []
561
674
  for key in sorted(manifest.allkeys()):
562
- # print out all non default values
675
+ # Skip default values
563
676
  if 'default' in key:
564
677
  continue
565
678
 
566
679
  param = manifest.get(*key, field=None)
567
680
 
568
- # create a TCL dict
681
+ # Create a Tcl dict key string
569
682
  keystr = ' '.join([NodeType.to_tcl(keypart, 'str') for keypart in key])
570
683
 
571
684
  valstr = param.gettcl(step=self.__step, index=self.__index)
572
685
  if valstr is None:
573
686
  continue
574
687
 
575
- # Ensure empty values get something
688
+ # Ensure empty values are represented as empty Tcl lists
576
689
  if valstr == '':
577
690
  valstr = '{}'
578
691
 
@@ -581,7 +694,8 @@ class TaskSchema(NamedSchema):
581
694
  if template:
582
695
  fout.write(template.render(manifest_dict='\n'.join(tcl_set_cmds),
583
696
  scroot=os.path.abspath(
584
- os.path.join(os.path.dirname(__file__))),
697
+ os.path.join(os.path.dirname(__file__))),
698
+ toolvars=self.get_tcl_variables(manifest),
585
699
  record_access="get" in Journal.access(self).get_types(),
586
700
  record_access_id=Schema._RECORD_ACCESS_IDENTIFIER))
587
701
  else:
@@ -590,6 +704,7 @@ class TaskSchema(NamedSchema):
590
704
  fout.write('\n')
591
705
 
592
706
  def __write_csv_manifest(self, fout, manifest):
707
+ """Private helper to write a manifest in CSV format."""
593
708
  csvwriter = csv.writer(fout)
594
709
  csvwriter.writerow(['Keypath', 'Value'])
595
710
 
@@ -608,13 +723,13 @@ class TaskSchema(NamedSchema):
608
723
  csvwriter.writerow([keypath, value])
609
724
 
610
725
  def write_task_manifest(self, directory, backup=True):
611
- '''
612
- Write the manifest needed for the task
726
+ """
727
+ Writes the manifest needed for the task in the format specified by the tool.
613
728
 
614
729
  Args:
615
- directory (path): directory to write the manifest into.
616
- backup (bool): if True and an existing manifest is found a backup is kept.
617
- '''
730
+ directory (str): The directory to write the manifest into.
731
+ backup (bool): If True, backs up an existing manifest.
732
+ """
618
733
 
619
734
  suffix = self.schema("tool").get('format')
620
735
  if not suffix:
@@ -625,19 +740,17 @@ class TaskSchema(NamedSchema):
625
740
  if backup and os.path.exists(manifest_path):
626
741
  shutil.copyfile(manifest_path, f'{manifest_path}.bak')
627
742
 
628
- # Generate abs paths
743
+ # Generate a schema with absolute paths for the manifest
629
744
  schema = self.__abspath_schema()
630
745
 
631
746
  if re.search(r'\.json(\.gz)?$', manifest_path):
632
747
  schema.write_manifest(manifest_path)
633
748
  else:
634
749
  try:
635
- # format specific dumping
750
+ # Format-specific dumping
636
751
  if manifest_path.endswith('.gz'):
637
752
  fout = gzip.open(manifest_path, 'wt', encoding='UTF-8')
638
753
  elif re.search(r'\.csv$', manifest_path):
639
- # Files written using csv library should be opened with newline=''
640
- # https://docs.python.org/3/library/csv.html#id3
641
754
  fout = open(manifest_path, 'w', newline='')
642
755
  else:
643
756
  fout = open(manifest_path, 'w')
@@ -654,6 +767,10 @@ class TaskSchema(NamedSchema):
654
767
  fout.close()
655
768
 
656
769
  def __abspath_schema(self):
770
+ """
771
+ Private helper to create a copy of the schema with all file/dir paths
772
+ converted to absolute paths.
773
+ """
657
774
  root = self.schema()
658
775
  schema = root.copy()
659
776
 
@@ -663,7 +780,6 @@ class TaskSchema(NamedSchema):
663
780
  for keypath in root.allkeys():
664
781
  paramtype = schema.get(*keypath, field='type')
665
782
  if 'file' not in paramtype and 'dir' not in paramtype:
666
- # only do something if type is file or dir
667
783
  continue
668
784
 
669
785
  for value, step, index in root.get(*keypath, field=None).getvalues():
@@ -671,7 +787,6 @@ class TaskSchema(NamedSchema):
671
787
  continue
672
788
  abspaths = root.find_files(*keypath, missing_ok=True, step=step, index=index)
673
789
  if isinstance(abspaths, (set, list)) and None in abspaths:
674
- # Lists may not contain None
675
790
  schema.set(*keypath, [], step=step, index=index)
676
791
  else:
677
792
  if self.__relpath:
@@ -686,12 +801,12 @@ class TaskSchema(NamedSchema):
686
801
  return schema
687
802
 
688
803
  def __get_io_file(self, io_type):
689
- '''
690
- Get the runtime destination for the io type.
804
+ """
805
+ Private helper to get the runtime destination for stdout or stderr.
691
806
 
692
807
  Args:
693
- io_type (str): name of io type
694
- '''
808
+ io_type (str): The I/O type ('stdout' or 'stderr').
809
+ """
695
810
  suffix = self.get(io_type, "suffix")
696
811
  destination = self.get(io_type, "destination")
697
812
 
@@ -708,19 +823,17 @@ class TaskSchema(NamedSchema):
708
823
  return io_file, io_log
709
824
 
710
825
  def __terminate_exe(self, proc):
711
- '''
712
- Terminates a subprocess
826
+ """
827
+ Private helper to terminate a subprocess and its children.
713
828
 
714
829
  Args:
715
- proc (subprocess.Process): process to terminate
716
- '''
830
+ proc (subprocess.Process): The process to terminate.
831
+ """
717
832
 
718
833
  def terminate_process(pid, timeout=3):
719
- '''Terminates a process and all its (grand+)children.
720
-
834
+ """Terminates a process and all its (grand+)children.
721
835
  Based on https://psutil.readthedocs.io/en/latest/#psutil.wait_procs and
722
- https://psutil.readthedocs.io/en/latest/#kill-process-tree.
723
- '''
836
+ https://psutil.readthedocs.io/en/latest/#kill-process-tree."""
724
837
  parent = psutil.Process(pid)
725
838
  children = parent.children(recursive=True)
726
839
  children.append(parent)
@@ -728,13 +841,10 @@ class TaskSchema(NamedSchema):
728
841
  try:
729
842
  p.terminate()
730
843
  except psutil.NoSuchProcess:
731
- # Process may have terminated on its own in the meantime
732
844
  pass
733
845
 
734
846
  _, alive = psutil.wait_procs(children, timeout=timeout)
735
847
  for p in alive:
736
- # If processes are still alive after timeout seconds, send more
737
- # aggressive signal.
738
848
  p.kill()
739
849
 
740
850
  TERMINATE_TIMEOUT = 5
@@ -750,37 +860,35 @@ class TaskSchema(NamedSchema):
750
860
  terminate_process(proc.pid, timeout=TERMINATE_TIMEOUT)
751
861
 
752
862
  def run_task(self, workdir, quiet, loglevel, breakpoint, nice, timeout):
753
- '''
754
- Run the task.
863
+ """
864
+ Executes the task's main process.
755
865
 
756
- Raises:
757
- :class:`TaskError`: raised if the task failed to complete and
758
- should not be considered complete.
759
- :class:`TaskTimeout`: raised if the task reaches a timeout
866
+ This method handles the full lifecycle of running the tool, including
867
+ setting up the work directory, writing manifests, redirecting I/O,
868
+ monitoring for timeouts, and recording metrics.
760
869
 
761
870
  Args:
762
- workdir (path): path to the run work directory
763
- quiet (bool): if True, execution output is suppressed
764
- loglevel (str): logging level
765
- breakpoint (bool): if True, will attempt to execute with a breakpoint
766
- nice (int): POSIX nice level to use in execution
767
- timeout (int): timeout to use for execution
871
+ workdir (str): The path to the node's working directory.
872
+ quiet (bool): If True, suppresses execution output.
873
+ loglevel (str): The logging level.
874
+ breakpoint (bool): If True, attempts to run with a breakpoint.
875
+ nice (int): The POSIX nice level for the process.
876
+ timeout (int): The execution timeout in seconds.
768
877
 
769
878
  Returns:
770
- return code from the execution
771
- '''
879
+ int: The return code from the execution.
880
+ """
772
881
 
773
- # TODO: Currently no memory usage tracking in breakpoints, builtins, or unexpected errors.
774
882
  max_mem_bytes = 0
775
883
  cpu_start = time.time()
776
884
 
777
- # Ensure directories are setup
885
+ # Ensure directories are set up
778
886
  self.setup_work_directory(workdir, remove_exist=False)
779
887
 
780
- # Write task manifest
888
+ # Write task-specific manifest
781
889
  self.write_task_manifest(workdir)
782
890
 
783
- # Get file IO
891
+ # Get file I/O destinations
784
892
  stdout_file, is_stdout_log = self.__get_io_file("stdout")
785
893
  stderr_file, is_stderr_log = self.__get_io_file("stderr")
786
894
 
@@ -791,6 +899,7 @@ class TaskSchema(NamedSchema):
791
899
  stderr_print = self.__logger.error
792
900
 
793
901
  def read_stdio(stdout_reader, stderr_reader):
902
+ """Helper to read and print stdout/stderr streams."""
794
903
  if quiet:
795
904
  return
796
905
 
@@ -805,16 +914,16 @@ class TaskSchema(NamedSchema):
805
914
 
806
915
  retcode = 0
807
916
  if not exe:
808
- # No executable, so must call run()
917
+ # No executable defined, so call the Python `run()` method
809
918
  try:
810
919
  with open(stdout_file, 'w') as stdout_writer, \
811
- open(stderr_file, 'w') as stderr_writer:
920
+ open(stderr_file, 'w') as stderr_writer:
812
921
  if stderr_file == stdout_file:
813
922
  stderr_writer.close()
814
923
  stderr_writer = sys.stdout
815
924
 
816
925
  with contextlib.redirect_stderr(stderr_writer), \
817
- contextlib.redirect_stdout(stdout_writer):
926
+ contextlib.redirect_stdout(stdout_writer):
818
927
  retcode = self.run()
819
928
  except Exception as e:
820
929
  self.__logger.error(f'Failed in run() for {self.tool()}/{self.task()}: {e}')
@@ -822,22 +931,22 @@ class TaskSchema(NamedSchema):
822
931
  raise e
823
932
  finally:
824
933
  with sc_open(stdout_file) as stdout_reader, \
825
- sc_open(stderr_file) as stderr_reader:
934
+ sc_open(stderr_file) as stderr_reader:
826
935
  read_stdio(stdout_reader, stderr_reader)
827
936
 
828
937
  if resource:
829
938
  try:
830
- # Since memory collection is not possible, collect the current process
831
- # peak memory
939
+ # Collect peak memory usage of the current process
832
940
  max_mem_bytes = max(
833
941
  max_mem_bytes,
834
942
  1024 * resource.getrusage(resource.RUSAGE_SELF).ru_maxrss)
835
943
  except (OSError, ValueError, PermissionError):
836
944
  pass
837
945
  else:
946
+ # An executable is defined, run it as a subprocess
838
947
  cmdlist = self.get_runtime_arguments()
839
948
 
840
- # Make record of tool options
949
+ # Record tool options
841
950
  self.schema("record").record_tool(
842
951
  self.__step, self.__index,
843
952
  cmdlist, RecordTool.ARGS)
@@ -845,18 +954,10 @@ class TaskSchema(NamedSchema):
845
954
  self.__logger.info(shlex.join([os.path.basename(exe), *cmdlist]))
846
955
 
847
956
  if not pty and breakpoint:
848
- # pty not available
849
957
  breakpoint = False
850
958
 
851
959
  if breakpoint and sys.platform in ('darwin', 'linux'):
852
- # When we break on a step, the tool often drops into a shell.
853
- # However, our usual subprocess scheme seems to break terminal
854
- # echo for some tools. On POSIX-compatible systems, we can use
855
- # pty to connect the tool to our terminal instead. This code
856
- # doesn't handle quiet/timeout logic, since we don't want either
857
- # of these features for an interactive session. Logic for
858
- # forwarding to file based on
859
- # https://docs.python.org/3/library/pty.html#example.
960
+ # Use pty for interactive breakpoint sessions on POSIX systems
860
961
  with open(f"{self.__step}.log", 'wb') as log_writer:
861
962
  def read(fd):
862
963
  data = os.read(fd, 1024)
@@ -864,12 +965,11 @@ class TaskSchema(NamedSchema):
864
965
  return data
865
966
  retcode = pty.spawn([exe, *cmdlist], read)
866
967
  else:
968
+ # Standard subprocess execution
867
969
  with open(stdout_file, 'w') as stdout_writer, \
868
- open(stdout_file, 'r', errors='replace') as stdout_reader, \
869
- open(stderr_file, 'w') as stderr_writer, \
870
- open(stderr_file, 'r', errors='replace') as stderr_reader:
871
- # if STDOUT and STDERR are to be redirected to the same file,
872
- # use a single writer
970
+ open(stdout_file, 'r', errors='replace') as stdout_reader, \
971
+ open(stderr_file, 'w') as stderr_writer, \
972
+ open(stderr_file, 'r', errors='replace') as stderr_reader:
873
973
  if stderr_file == stdout_file:
874
974
  stderr_writer.close()
875
975
  stderr_reader.close()
@@ -891,13 +991,11 @@ class TaskSchema(NamedSchema):
891
991
  except Exception as e:
892
992
  raise TaskError(f"Unable to start {exe}: {str(e)}")
893
993
 
894
- # How long to wait for proc to quit on ctrl-c before force
895
- # terminating.
896
994
  POLL_INTERVAL = 0.1
897
995
  MEMORY_WARN_LIMIT = 90
898
996
  try:
899
997
  while proc.poll() is None:
900
- # Gather subprocess memory usage.
998
+ # Monitor subprocess memory usage
901
999
  try:
902
1000
  pproc = psutil.Process(proc.pid)
903
1001
  proc_mem_bytes = pproc.memory_full_info().uss
@@ -910,8 +1008,6 @@ class TaskSchema(NamedSchema):
910
1008
  self.__logger.warning(
911
1009
  'Current system memory usage is '
912
1010
  f'{memory_usage.percent:.1f}%')
913
-
914
- # increase limit warning
915
1011
  MEMORY_WARN_LIMIT = int(memory_usage.percent + 1)
916
1012
  except psutil.Error:
917
1013
  # Process may have already terminated or been killed.
@@ -922,9 +1018,9 @@ class TaskSchema(NamedSchema):
922
1018
  # be collected
923
1019
  pass
924
1020
 
925
- # Loop until process terminates
926
1021
  read_stdio(stdout_reader, stderr_reader)
927
1022
 
1023
+ # Check for timeout
928
1024
  duration = time.time() - cpu_start
929
1025
  if timeout is not None and duration > timeout:
930
1026
  raise TaskTimeout(timeout=duration)
@@ -939,17 +1035,16 @@ class TaskSchema(NamedSchema):
939
1035
  self.__terminate_exe(proc)
940
1036
  raise e from None
941
1037
 
942
- # Read the remaining io
1038
+ # Read any remaining I/O
943
1039
  read_stdio(stdout_reader, stderr_reader)
944
1040
 
945
1041
  retcode = proc.returncode
946
1042
 
947
- # Record record information
1043
+ # Record metrics
948
1044
  self.schema("record").record_tool(
949
1045
  self.__step, self.__index,
950
1046
  retcode, RecordTool.EXITCODE)
951
1047
 
952
- # Capture runtime metrics
953
1048
  self.schema("metric").record(
954
1049
  self.__step, self.__index,
955
1050
  'exetime', time.time() - cpu_start, unit='s')
@@ -960,45 +1055,40 @@ class TaskSchema(NamedSchema):
960
1055
  return retcode
961
1056
 
962
1057
  def __getstate__(self):
1058
+ """Custom state for pickling, removing runtime info."""
963
1059
  state = self.__dict__.copy()
964
-
965
- # Remove runtime information
966
1060
  for key in list(state.keys()):
967
1061
  if key.startswith("_TaskSchema__"):
968
1062
  del state[key]
969
-
970
1063
  return state
971
1064
 
972
1065
  def __setstate__(self, state):
1066
+ """Custom state for unpickling, re-initializing runtime info."""
973
1067
  self.__dict__ = state
974
-
975
- # Reinit runtime information
976
1068
  self.__set_runtime(None)
977
1069
 
978
1070
  def get_output_files(self):
1071
+ """Gets the set of output files defined for this task."""
979
1072
  return set(self.get("output"))
980
1073
 
981
1074
  def get_files_from_input_nodes(self):
982
1075
  """
983
- Returns a dictionary of files with the node they originated from
1076
+ Returns a dictionary of files from input nodes, mapped to the node
1077
+ they originated from.
984
1078
  """
985
-
986
1079
  nodes = self.schema("runtimeflow").get_nodes()
987
-
988
1080
  inputs = {}
989
- for in_step, in_index in self.schema("flow").get(*self.node(), 'input'):
1081
+ for in_step, in_index in self.schema("flow").get(self.step, self.index, 'input'):
990
1082
  if (in_step, in_index) not in nodes:
991
- # node has been pruned so will not provide anything
992
1083
  continue
993
1084
 
994
1085
  in_tool = self.schema("flow").get(in_step, in_index, "tool")
995
1086
  in_task = self.schema("flow").get(in_step, in_index, "task")
996
-
997
1087
  task_obj = self.schema().get("tool", in_tool, "task", in_task, field="schema")
998
1088
 
999
1089
  if self.schema("record").get('status', step=in_step, index=in_index) == \
1000
1090
  NodeStatus.SKIPPED:
1001
- with task_obj.runtime(self.__chip, step=in_step, index=in_index) as task:
1091
+ with task_obj.runtime(self.__node.switch_node(in_step, in_index)) as task:
1002
1092
  for file, nodes in task.get_files_from_input_nodes().items():
1003
1093
  inputs.setdefault(file, []).extend(nodes)
1004
1094
  continue
@@ -1010,39 +1100,35 @@ class TaskSchema(NamedSchema):
1010
1100
 
1011
1101
  def compute_input_file_node_name(self, filename, step, index):
1012
1102
  """
1013
- Generate a unique name for in input file based on the originating node.
1103
+ Generates a unique name for an input file based on its originating node.
1014
1104
 
1015
1105
  Args:
1016
- filename (str): name of inputfile
1017
- step (str): Step name
1018
- index (str): Index name
1106
+ filename (str): The original name of the input file.
1107
+ step (str): The step name of the originating node.
1108
+ index (str): The index of the originating node.
1019
1109
  """
1020
-
1021
1110
  _, file_type = os.path.splitext(filename)
1022
-
1023
1111
  if file_type:
1024
1112
  base = filename
1025
1113
  total_ext = []
1026
1114
  while file_type:
1027
1115
  base, file_type = os.path.splitext(base)
1028
1116
  total_ext.append(file_type)
1029
-
1030
1117
  total_ext.reverse()
1031
-
1032
1118
  return f'{base}.{step}{index}{"".join(total_ext)}'
1033
1119
  else:
1034
1120
  return f'{filename}.{step}{index}'
1035
1121
 
1036
1122
  def add_parameter(self, name, type, help, defvalue=None, **kwargs):
1037
- '''
1038
- Adds a parameter to the task definition.
1123
+ """
1124
+ Adds a custom parameter ('var') to the task definition.
1039
1125
 
1040
1126
  Args:
1041
- name (str): name of parameter
1042
- type (str): schema type of the parameter
1043
- help (str): help string for this parameter
1044
- defvalue (any): default value for the parameter
1045
- '''
1127
+ name (str): The name of the parameter.
1128
+ type (str): The schema type of the parameter.
1129
+ help (str): The help string for the parameter.
1130
+ defvalue: The default value for the parameter.
1131
+ """
1046
1132
  help = trim(help)
1047
1133
  param = Parameter(
1048
1134
  type,
@@ -1053,21 +1139,23 @@ class TaskSchema(NamedSchema):
1053
1139
  shorthelp=help,
1054
1140
  help=help
1055
1141
  )
1056
-
1057
1142
  EditableSchema(self).insert("var", name, param)
1058
-
1059
1143
  return param
1060
1144
 
1061
- def add_required_tool_key(self, *key: str):
1145
+ ###############################################################
1146
+ # Task settings
1147
+ ###############################################################
1148
+ def add_required_tool_key(self, *key: str, step: str = None, index: str = None):
1062
1149
  '''
1063
1150
  Adds a required tool keypath to the task driver.
1064
1151
 
1065
1152
  Args:
1066
1153
  key (list of str): required key path
1067
1154
  '''
1068
- return self.add_required_key(self, *key)
1155
+ return self.add_required_key(self, *key, step=step, index=index)
1069
1156
 
1070
- def add_required_key(self, obj: Union[BaseSchema, str], *key: str):
1157
+ def add_required_key(self, obj: Union[BaseSchema, str], *key: str,
1158
+ step: str = None, index: str = None):
1071
1159
  '''
1072
1160
  Adds a required keypath to the task driver.
1073
1161
 
@@ -1086,9 +1174,11 @@ class TaskSchema(NamedSchema):
1086
1174
  if any([not isinstance(k, str) for k in key]):
1087
1175
  raise ValueError("key can only contain strings")
1088
1176
 
1089
- return self.add("require", ",".join(key))
1177
+ return self.add("require", ",".join(key), step=step, index=index)
1090
1178
 
1091
- def set_threads(self, max_threads: int = None, clobber: bool = False):
1179
+ def set_threads(self, max_threads: int = None,
1180
+ step: str = None, index: str = None,
1181
+ clobber: bool = False):
1092
1182
  """
1093
1183
  Sets the requested thread count for the task
1094
1184
 
@@ -1101,15 +1191,17 @@ class TaskSchema(NamedSchema):
1101
1191
  if max_threads is None or max_threads <= 0:
1102
1192
  max_threads = utils.get_cores(None)
1103
1193
 
1104
- return self.set("threads", max_threads, clobber=clobber)
1194
+ return self.set("threads", max_threads, step=step, index=index, clobber=clobber)
1105
1195
 
1106
- def get_threads(self) -> int:
1196
+ def get_threads(self, step: str = None, index: str = None) -> int:
1107
1197
  """
1108
1198
  Returns the number of threads requested.
1109
1199
  """
1110
- return self.get("threads")
1200
+ return self.get("threads", step=step, index=index)
1111
1201
 
1112
- def add_commandline_option(self, option: Union[List[str], str], clobber: bool = False):
1202
+ def add_commandline_option(self, option: Union[List[str], str],
1203
+ step: str = None, index: str = None,
1204
+ clobber: bool = False):
1113
1205
  """
1114
1206
  Add to the command line options for the task
1115
1207
 
@@ -1119,17 +1211,19 @@ class TaskSchema(NamedSchema):
1119
1211
  """
1120
1212
 
1121
1213
  if clobber:
1122
- return self.set("option", option)
1214
+ return self.set("option", option, step=step, index=index)
1123
1215
  else:
1124
- return self.add("option", option)
1216
+ return self.add("option", option, step=step, index=index)
1125
1217
 
1126
- def get_commandline_options(self) -> List[str]:
1218
+ def get_commandline_options(self, step: str = None, index: str = None) -> List[str]:
1127
1219
  """
1128
1220
  Returns the command line options specified
1129
1221
  """
1130
- return self.get("option")
1222
+ return self.get("option", step=step, index=index)
1131
1223
 
1132
- def add_input_file(self, file: str = None, ext: str = None, clobber: bool = False):
1224
+ def add_input_file(self, file: str = None, ext: str = None,
1225
+ step: str = None, index: str = None,
1226
+ clobber: bool = False):
1133
1227
  """
1134
1228
  Add a required input file from the previous step in the flow.
1135
1229
  file and ext are mutually exclusive.
@@ -1143,14 +1237,16 @@ class TaskSchema(NamedSchema):
1143
1237
  raise ValueError("only file or ext can be specified")
1144
1238
 
1145
1239
  if ext:
1146
- file = f"{self.design_topmodule()}.{ext}"
1240
+ file = f"{self.design_topmodule}.{ext}"
1147
1241
 
1148
1242
  if clobber:
1149
- return self.set("input", file)
1243
+ return self.set("input", file, step=step, index=index)
1150
1244
  else:
1151
- return self.add("input", file)
1245
+ return self.add("input", file, step=step, index=index)
1152
1246
 
1153
- def add_output_file(self, file: str = None, ext: str = None, clobber: bool = False):
1247
+ def add_output_file(self, file: str = None, ext: str = None,
1248
+ step: str = None, index: str = None,
1249
+ clobber: bool = False):
1154
1250
  """
1155
1251
  Add an output file that this task will produce
1156
1252
  file and ext are mutually exclusive.
@@ -1164,14 +1260,145 @@ class TaskSchema(NamedSchema):
1164
1260
  raise ValueError("only file or ext can be specified")
1165
1261
 
1166
1262
  if ext:
1167
- file = f"{self.design_topmodule()}.{ext}"
1263
+ file = f"{self.design_topmodule}.{ext}"
1264
+
1265
+ if clobber:
1266
+ return self.set("output", file, step=step, index=index)
1267
+ else:
1268
+ return self.add("output", file, step=step, index=index)
1269
+
1270
+ def set_environmentalvariable(self, name: str, value: str,
1271
+ step: str = None, index: str = None,
1272
+ clobber: bool = False):
1273
+ return self.set("env", name, value, step=step, index=index, clobber=clobber)
1274
+
1275
+ def add_prescript(self, script: str, dataroot: str = None,
1276
+ step: str = None, index: str = None,
1277
+ clobber: bool = False):
1278
+ if not dataroot:
1279
+ dataroot = self._get_active("package")
1280
+ with self._active(package=dataroot):
1281
+ if clobber:
1282
+ return self.set("prescript", script, step=step, index=index)
1283
+ else:
1284
+ return self.add("prescript", script, step=step, index=index)
1285
+
1286
+ def add_postscript(self, script: str, dataroot: str = None,
1287
+ step: str = None, index: str = None,
1288
+ clobber: bool = False):
1289
+ if not dataroot:
1290
+ dataroot = self._get_active("package")
1291
+ with self._active(package=dataroot):
1292
+ if clobber:
1293
+ return self.set("postscript", script, step=step, index=index)
1294
+ else:
1295
+ return self.add("postscript", script, step=step, index=index)
1296
+
1297
+ def has_prescript(self, step: str = None, index: str = None) -> bool:
1298
+ if self.get("prescript", step=step, index=index):
1299
+ return True
1300
+ return False
1301
+
1302
+ def has_postscript(self, step: str = None, index: str = None) -> bool:
1303
+ if self.get("postscript", step=step, index=index):
1304
+ return True
1305
+ return False
1168
1306
 
1307
+ def set_refdir(self, dir: str, dataroot: str = None,
1308
+ step: str = None, index: str = None,
1309
+ clobber: bool = False):
1310
+ if not dataroot:
1311
+ dataroot = self._get_active("package")
1312
+ with self._active(package=dataroot):
1313
+ return self.set("refdir", dir, step=step, index=index, clobber=clobber)
1314
+
1315
+ def set_script(self, script: str, dataroot: str = None,
1316
+ step: str = None, index: str = None,
1317
+ clobber: bool = False):
1318
+ if not dataroot:
1319
+ dataroot = self._get_active("package")
1320
+ with self._active(package=dataroot):
1321
+ return self.set("script", script, step=step, index=index, clobber=clobber)
1322
+
1323
+ def add_regex(self, type: str, regex: str,
1324
+ step: str = None, index: str = None,
1325
+ clobber: bool = False):
1169
1326
  if clobber:
1170
- return self.set("output", file)
1327
+ return self.set("regex", type, regex, step=step, index=index)
1171
1328
  else:
1172
- return self.add("output", file)
1329
+ return self.add("regex", type, regex, step=step, index=index)
1330
+
1331
+ def set_logdestination(self, type: str, dest: str, suffix: str = None,
1332
+ step: str = None, index: str = None,
1333
+ clobber: bool = False):
1334
+ rets = []
1335
+ rets.append(self.set(type, "destination", dest, step=step, index=index, clobber=clobber))
1336
+ if suffix:
1337
+ rets.append(self.set(type, "suffix", suffix, step=step, index=index, clobber=clobber))
1338
+ return rets
1339
+
1340
+ def add_warningoff(self, type: str, step: str = None, index: str = None, clobber: bool = False):
1341
+ if clobber:
1342
+ return self.set("warningoff", type, step=step, index=index)
1343
+ else:
1344
+ return self.add("warningoff", type, step=step, index=index)
1345
+
1346
+ ###############################################################
1347
+ # Tool settings
1348
+ ###############################################################
1349
+ def set_exe(self, exe: str = None, vswitch: List[str] = None, format: str = None,
1350
+ step: str = None, index: str = None,
1351
+ clobber: bool = False):
1352
+ rets = []
1353
+ if exe:
1354
+ rets.append(self.schema("tool").set("exe", exe, clobber=clobber))
1355
+ if vswitch:
1356
+ switches = self.add_vswitch(vswitch, clobber=clobber)
1357
+ if not isinstance(switches, list):
1358
+ switches = list(switches)
1359
+ rets.extend(switches)
1360
+ if format:
1361
+ rets.append(self.schema("tool").set("format", format, clobber=clobber))
1362
+ return rets
1363
+
1364
+ def set_path(self, path: str, dataroot: str = None,
1365
+ step: str = None, index: str = None,
1366
+ clobber: bool = False):
1367
+ if not dataroot:
1368
+ dataroot = self.schema("tool")._get_active("package")
1369
+ with self.schema("tool")._active(package=dataroot):
1370
+ return self.schema("tool").set("path", path, step=step, index=index, clobber=clobber)
1371
+
1372
+ def add_version(self, version: str, step: str = None, index: str = None, clobber: bool = False):
1373
+ if clobber:
1374
+ return self.schema("tool").set("version", version, step=step, index=index)
1375
+ else:
1376
+ return self.schema("tool").add("version", version, step=step, index=index)
1377
+
1378
+ def add_vswitch(self, switch: str, clobber: bool = False):
1379
+ if clobber:
1380
+ return self.schema("tool").set("vswitch", switch)
1381
+ else:
1382
+ return self.schema("tool").add("vswitch", switch)
1383
+
1384
+ def add_licenseserver(self, name: str, server: str,
1385
+ step: str = None, index: str = None,
1386
+ clobber: bool = False):
1387
+ if clobber:
1388
+ return self.schema("tool").set("licenseserver", name, server, step=step, index=index)
1389
+ else:
1390
+ return self.schema("tool").add("licenseserver", name, server, step=step, index=index)
1391
+
1392
+ def add_sbom(self, version: str, sbom: str, dataroot: str = None, clobber: bool = False):
1393
+ if not dataroot:
1394
+ dataroot = self.schema("tool")._get_active("package")
1395
+ with self.schema("tool")._active(package=dataroot):
1396
+ if clobber:
1397
+ return self.schema("tool").set("sbom", version, sbom)
1398
+ else:
1399
+ return self.schema("tool").add("sbom", version, sbom)
1173
1400
 
1174
- def record_metric(self, metric, value, source_file=None, source_unit=None):
1401
+ def record_metric(self, metric, value, source_file=None, source_unit=None, quiet=False):
1175
1402
  '''
1176
1403
  Records a metric and associates the source file with it.
1177
1404
 
@@ -1180,6 +1407,7 @@ class TaskSchema(NamedSchema):
1180
1407
  value (float/int): value of the metric that is being recorded
1181
1408
  source (str): file the value came from
1182
1409
  source_unit (str): unit of the value, if not provided it is assumed to have no units
1410
+ quiet (bool): do not generate a warning if the metric is not recognized
1183
1411
 
1184
1412
  Examples:
1185
1413
  >>> self.record_metric('cellarea', 500.0, 'reports/metrics.json', \\
@@ -1188,33 +1416,79 @@ class TaskSchema(NamedSchema):
1188
1416
  '''
1189
1417
 
1190
1418
  if metric not in self.schema("metric").getkeys():
1191
- self.logger().warning(f"{metric} is not a valid metric")
1419
+ if not quiet:
1420
+ self.logger.warning(f"{metric} is not a valid metric")
1192
1421
  return
1193
1422
 
1194
1423
  self.schema("metric").record(self.__step, self.__index, metric, value, unit=source_unit)
1195
1424
  if source_file:
1196
1425
  self.add("report", metric, source_file)
1197
1426
 
1198
- ###############################################################
1199
- def get(self, *keypath, field='value'):
1200
- return super().get(*keypath, field=field,
1201
- step=self.__step, index=self.__index)
1427
+ def get_fileset_file_keys(self, filetype: str) -> List[Tuple[NamedSchema, Tuple[str]]]:
1428
+ """
1429
+ Collect a set of keys for a particular filetype.
1430
+
1431
+ Args:
1432
+ filetype (str): Name of the filetype
1433
+
1434
+ Returns:
1435
+ list of (object, keypath)
1436
+ """
1437
+ if not isinstance(filetype, str):
1438
+ raise TypeError("filetype must be a string")
1202
1439
 
1203
- def set(self, *args, field='value', clobber=True):
1204
- return super().set(*args, field=field, clobber=clobber,
1205
- step=self.__step, index=self.__index)
1440
+ keys = []
1441
+ for obj, fileset in self.schema().get_filesets():
1442
+ key = ("fileset", fileset, "file", filetype)
1443
+ if obj.valid(*key, check_complete=True):
1444
+ keys.append((obj, key))
1445
+ return keys
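A caller can feed the (object, keypath) pairs returned above straight back into the schema accessors. A short sketch, where the 'verilog' filetype is an assumption about what the active filesets contain:

    # Sketch only: 'verilog' is an assumed filetype.
    def gather_sources(task):
        sources = []
        for obj, keypath in task.get_fileset_file_keys("verilog"):
            # keypath is ("fileset", <fileset>, "file", "verilog") on obj
            sources.extend(obj.get(*keypath))
        return sources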
1206
1446
 
1207
- def add(self, *args, field='value'):
1208
- return super().add(*args, field=field, step=self.__step, index=self.__index)
1447
+ ###############################################################
1448
+ # Schema
1449
+ ###############################################################
1450
+ def get(self, *keypath, field='value', step: str = None, index: str = None):
1451
+ if not step:
1452
+ step = self.__step
1453
+ if not index:
1454
+ index = self.__index
1455
+ return super().get(*keypath, field=field, step=step, index=index)
1456
+
1457
+ def set(self, *args, field='value', step: str = None, index: str = None, clobber=True):
1458
+ if not step:
1459
+ step = self.__step
1460
+ if not index:
1461
+ index = self.__index
1462
+ return super().set(*args, field=field, clobber=clobber, step=step, index=index)
1463
+
1464
+ def add(self, *args, field='value', step: str = None, index: str = None):
1465
+ if not step:
1466
+ step = self.__step
1467
+ if not index:
1468
+ index = self.__index
1469
+ return super().add(*args, field=field, step=step, index=index)
1470
+
1471
+ def unset(self, *args, step: str = None, index: str = None):
1472
+ if not step:
1473
+ step = self.__step
1474
+ if not index:
1475
+ index = self.__index
1476
+ return super().unset(*args, step=step, index=index)
1477
+
1478
+ def find_files(self, *keypath, missing_ok=False, step=None, index=None):
1479
+ if not step:
1480
+ step = self.__step
1481
+ if not index:
1482
+ index = self.__index
1483
+ return super().find_files(*keypath, missing_ok=missing_ok,
1484
+ step=step, index=index,
1485
+ collection_dir=self.__collection_path,
1486
+ cwd=self.__cwd)
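The overrides above make step and index default to the node the task is bound to, so most task code can omit them. A brief illustration, assuming `task` is a TaskSchema attached to a running node, using the existing 'option' key; the 'syn' node is a placeholder:

    opts = task.get("option")                                     # active node
    same = task.get("option", step=task.step, index=task.index)   # equivalent
    other = task.get("option", step="syn", index="0")             # another node, explicitly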
1209
1487
 
1210
1488
  def _find_files_search_paths(self, keypath, step, index):
1211
1489
  paths = super()._find_files_search_paths(keypath, step, index)
1212
1490
  if keypath == "script":
1213
- paths.extend(self.find_files(
1214
- "refdir",
1215
- step=step, index=index,
1216
- cwd=self.__cwd,
1217
- collection_dir=self.__collection_path))
1491
+ paths.extend(self.find_files("refdir", step=step, index=index))
1218
1492
  elif keypath == "input":
1219
1493
  paths.append(os.path.join(self._parent(root=True).getworkdir(step=step, index=index),
1220
1494
  "inputs"))
@@ -1226,55 +1500,440 @@ class TaskSchema(NamedSchema):
1226
1500
  "outputs"))
1227
1501
  return paths
1228
1502
 
1503
+ ###############################################################
1504
+ # Task methods
1229
1505
  ###############################################################
1230
1506
  def parse_version(self, stdout):
1507
+ """
1508
+ Parses the tool's version from its stdout. Must be implemented by subclasses.
1509
+ """
1231
1510
  raise NotImplementedError("must be implemented by the implementation class")
1232
1511
 
1233
1512
  def normalize_version(self, version):
1513
+ """
1514
+ Normalizes a version string to a standard format. Can be overridden.
1515
+ """
1234
1516
  return version
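A concrete tool task supplies parse_version(), and optionally normalize_version(), so the runtime can compare the installed tool against the declared version specs. A minimal sketch for a hypothetical tool that prints "mytool 1.2.3+build5" to stdout:

    # Sketch only: the tool and its --version output format are invented.
    class MyToolTask(TaskSchema):
        def parse_version(self, stdout):
            # e.g. "mytool 1.2.3+build5" -> "1.2.3+build5"
            return stdout.split()[1]

        def normalize_version(self, version):
            # Drop the local build suffix so versions compare cleanly
            return version.split("+")[0]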
1235
1517
 
1236
1518
  def setup(self):
1519
+ """
1520
+ A hook for setting up the task before execution. Can be overridden.
1521
+ """
1237
1522
  pass
1238
1523
 
1239
1524
  def select_input_nodes(self):
1525
+ """
1526
+ Determines which preceding nodes are inputs to this task.
1527
+ """
1240
1528
  return self.schema("runtimeflow").get_node_inputs(
1241
1529
  self.__step, self.__index, record=self.schema("record"))
1242
1530
 
1243
1531
  def pre_process(self):
1532
+ """
1533
+ A hook for pre-processing before the main tool execution. Can be overridden.
1534
+ """
1244
1535
  pass
1245
1536
 
1246
1537
  def runtime_options(self):
1538
+ """
1539
+ Constructs the default runtime options for the task. Can be extended.
1540
+ """
1247
1541
  cmdargs = []
1248
1542
  cmdargs.extend(self.get("option"))
1249
-
1250
- # Add scripts files / TODO:
1251
- scripts = self.find_files(
1252
- 'script',
1253
- step=self.__step, index=self.__index,
1254
- cwd=self.__cwd,
1255
- collection_dir=self.__collection_path,
1256
- missing_ok=True)
1257
-
1258
- cmdargs.extend(scripts)
1259
-
1543
+ script = self.find_files('script', missing_ok=True)
1544
+ if script:
1545
+ cmdargs.extend(script)
1260
1546
  return cmdargs
1261
1547
 
1262
1548
  def run(self):
1549
+ """
1550
+ The main execution logic for Python-based tasks. Must be implemented.
1551
+ """
1263
1552
  raise NotImplementedError("must be implemented by the implementation class")
1264
1553
 
1265
1554
  def post_process(self):
1555
+ """
1556
+ A hook for post-processing after the main tool execution. Can be overridden.
1557
+ """
1266
1558
  pass
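For a pure-Python task (no external executable), the hooks above compose into a small class. The sketch below copies one input file to the outputs directory; the filenames are placeholders, and both the use of the task's 'input'/'output' lists via add() and the exit-status style return from run() are assumptions about the surrounding runtime, not documented guarantees:

    # Sketch only: filenames are placeholders; see the assumptions in the note above.
    import shutil

    class CopyTask(TaskSchema):
        def setup(self):
            super().setup()
            self.add("input", "data.txt")    # expected in inputs/
            self.add("output", "data.txt")   # produced into outputs/

        def run(self):
            # inputs/ and outputs/ sit under the node's working directory
            # (see _find_files_search_paths above).
            shutil.copy("inputs/data.txt", "outputs/data.txt")
            return 0  # assumed exit-status convention

        def post_process(self):
            # Nothing to scrape for this task.
            pass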
1267
1559
 
1268
1560
 
1561
+ class ShowTaskSchema(TaskSchema):
1562
+ """
1563
+ A specialized TaskSchema for tasks that display files (e.g., in a GUI viewer).
1564
+
1565
+ This class provides a framework for dynamically finding and configuring
1566
+ viewer applications based on file types. It includes parameters for
1567
+ specifying the file to show and controlling the viewer's behavior.
1568
+ Subclasses should implement `get_supported_show_extentions` to declare
1569
+ which file types they can handle.
1570
+ """
1571
+ __TASKS_LOCK = threading.Lock()
1572
+ __TASKS = {}
1573
+
1574
+ def __init__(self):
1575
+ """Initializes a ShowTaskSchema, adding specific parameters for show tasks."""
1576
+ super().__init__()
1577
+ self.add_parameter("showfilepath", "file", "path to show")
1578
+ self.add_parameter("showfiletype", "str", "filetype to show")
1579
+ self.add_parameter("shownode", "(str,str,str)",
1580
+ "source node information, not always available")
1581
+ self.add_parameter("showexit", "bool", "exit after opening", defvalue=False)
1582
+
1583
+ @classmethod
1584
+ def __check_task(cls, task):
1585
+ """
1586
+ Private helper to validate if a task is a valid ShowTask or ScreenshotTask.
1587
+ """
1588
+ if cls is not ShowTaskSchema and cls is not ScreenshotTaskSchema:
1589
+ raise TypeError("class must be ShowTaskSchema or ScreenshotTaskSchema")
1590
+
1591
+ if task is None:
1592
+ return
1593
+
1594
+ if cls is ShowTaskSchema:
1595
+ check, task_filter = ShowTaskSchema, ScreenshotTaskSchema
1596
+ else:
1597
+ check, task_filter = ScreenshotTaskSchema, None
1598
+
1599
+ if not issubclass(task, check):
1600
+ return False
1601
+ if task_filter and issubclass(task, task_filter):
1602
+ return False
1603
+
1604
+ return True
1605
+
1606
+ @classmethod
1607
+ def register_task(cls, task):
1608
+ """
1609
+ Registers a new show task class for dynamic discovery.
1610
+
1611
+ Args:
1612
+ task: The show task class to register.
1613
+
1614
+ Raises:
1615
+ TypeError: If the task is not a valid subclass.
1616
+ """
1617
+ if not cls.__check_task(task):
1618
+ raise TypeError(f"task must be a subclass of {cls.__name__}")
1619
+
1620
+ with cls.__TASKS_LOCK:
1621
+ cls.__TASKS.setdefault(cls, set()).add(task)
1622
+
1623
+ @classmethod
1624
+ def __populate_tasks(cls):
1625
+ """
1626
+ Private helper to discover and populate all available show/screenshot tasks.
1627
+
1628
+ This method recursively finds all subclasses and also loads tasks from
1629
+ any installed plugins.
1630
+ """
1631
+ cls.__check_task(None)
1632
+
1633
+ def recurse(searchcls):
1634
+ subclss = set()
1635
+ if not cls.__check_task(searchcls):
1636
+ return subclss
1637
+
1638
+ subclss.add(searchcls)
1639
+ for subcls in searchcls.__subclasses__():
1640
+ subclss.update(recurse(subcls))
1641
+
1642
+ return subclss
1643
+
1644
+ classes = recurse(cls)
1645
+ # Support non-SC defined tasks from plugins
1646
+ for plugin in utils.get_plugins('showtask'): # TODO rename
1647
+ plugin()
1648
+
1649
+ if not classes:
1650
+ return
1651
+
1652
+ with ShowTaskSchema.__TASKS_LOCK:
1653
+ ShowTaskSchema.__TASKS.setdefault(cls, set()).update(classes)
1654
+
1655
+ @classmethod
1656
+ def get_task(cls, ext):
1657
+ """
1658
+ Retrieves a suitable show task instance for a given file extension.
1659
+
1660
+ Args:
1661
+ ext (str): The file extension to find a viewer for.
1662
+
1663
+ Returns:
1664
+ An instance of a compatible ShowTaskSchema subclass, or None if
1665
+ no suitable task is found. If ext is None, the set of registered task classes is returned instead.
1666
+ """
1667
+ cls.__check_task(None)
1668
+
1669
+ if cls not in ShowTaskSchema.__TASKS:
1670
+ cls.__populate_tasks()
1671
+
1672
+ with ShowTaskSchema.__TASKS_LOCK:
1673
+ if cls not in ShowTaskSchema.__TASKS:
1674
+ return None
1675
+ tasks = ShowTaskSchema.__TASKS[cls].copy()
1676
+
1677
+ # TODO: add user preference lookup (ext -> task)
1678
+
1679
+ if ext is None:
1680
+ return tasks
1681
+
1682
+ for task in tasks:
1683
+ try:
1684
+ if ext in task().get_supported_show_extentions():
1685
+ return task()
1686
+ except NotImplementedError:
1687
+ pass
1688
+
1689
+ return None
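register_task() and get_task() together let an out-of-tree viewer plug into the discovery mechanism. A sketch with an invented waveform viewer; note that the extension hook keeps the spelling used by the class above, get_supported_show_extentions:

    # Sketch only: the viewer class, extensions, and launch logic are hypothetical.
    class MyWaveformShow(ShowTaskSchema):
        def get_supported_show_extentions(self):
            return ["vcd", "fst"]

        def run(self):
            path = self.get("var", "showfilepath")
            # ... launch the viewer on `path` here ...
            return 0

    ShowTaskSchema.register_task(MyWaveformShow)
    task = ShowTaskSchema.get_task("vcd")   # -> a MyWaveformShow instance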
1690
+
1691
+ def task(self):
1692
+ """Returns the name of this task."""
1693
+ return "show"
1694
+
1695
+ def setup(self):
1696
+ """Sets up the parameters and requirements for the show task."""
1697
+ super().setup()
1698
+
1699
+ self._set_filetype()
1700
+
1701
+ self.add_required_tool_key("var", "showexit")
1702
+
1703
+ if self.get("var", "shownode"):
1704
+ self.add_required_tool_key("var", "shownode")
1705
+
1706
+ if self.get("var", "showfilepath"):
1707
+ self.add_required_tool_key("var", "showfilepath")
1708
+ elif self.get("var", "showfiletype"):
1709
+ self.add_required_tool_key("var", "showfiletype")
1710
+ else:
1711
+ raise ValueError("no file information provided to show")
1712
+
1713
+ def get_supported_show_extentions(self) -> List[str]:
1714
+ """
1715
+ Returns a list of file extensions supported by this show task.
1716
+ This method must be implemented by subclasses.
1717
+ """
1718
+ raise NotImplementedError(
1719
+ "get_supported_show_extentions must be implemented by the child class")
1720
+
1721
+ def _set_filetype(self):
1722
+ """
1723
+ Private helper to determine and set the 'showfiletype' parameter based
1724
+ on the provided 'showfilepath' or available input files.
1725
+ """
1726
+ def set_file(file, ext):
1727
+ if file.lower().endswith(".gz"):
1728
+ self.set("var", "showfiletype", f"{ext}.gz")
1729
+ else:
1730
+ self.set("var", "showfiletype", ext)
1731
+
1732
+ if not self.get("var", "showfilepath"):
1733
+ exts = self.preferred_show_extensions()
1734
+
1735
+ if not self.get("var", "showfiletype"):
1736
+ input_files = {utils.get_file_ext(f): f.lower()
1737
+ for f in self.get_files_from_input_nodes().keys()}
1738
+ for ext in exts:
1739
+ if ext in input_files:
1740
+ set_file(input_files[ext], ext)
1741
+ break
1742
+ self.set("var", "showfiletype", exts[-1], clobber=False)
1743
+ else:
1744
+ file = self.get("var", "showfilepath")
1745
+ ext = utils.get_file_ext(file)
1746
+ set_file(file, ext)
1747
+
1748
+ def set_showfilepath(self, path: str, step: str = None, index: str = None):
1749
+ """Sets the path to the file to be displayed."""
1750
+ return self.set("var", "showfilepath", path, step=step, index=index)
1751
+
1752
+ def set_showfiletype(self, file_type: str, step: str = None, index: str = None):
1753
+ """Sets the type of the file to be displayed."""
1754
+ return self.set("var", "showfiletype", file_type, step=step, index=index)
1755
+
1756
+ def set_showexit(self, value: bool, step: str = None, index: str = None):
1757
+ """Sets whether the viewer application should exit after opening the file."""
1758
+ return self.set("var", "showexit", value, step=step, index=index)
1759
+
1760
+ def set_shownode(self, jobname: str = None, nodestep: str = None, nodeindex: str = None,
1761
+ step: str = None, index: str = None):
1762
+ """Sets the source node information for the file being displayed."""
1763
+ return self.set("var", "shownode", (jobname, nodestep, nodeindex), step=step, index=index)
1764
+
1765
+ def get_tcl_variables(self, manifest=None):
1766
+ """
1767
+ Gets Tcl variables for the task, ensuring 'sc_do_screenshot' is false
1768
+ for regular show tasks.
1769
+ """
1770
+ vars = super().get_tcl_variables(manifest)
1771
+ vars["sc_do_screenshot"] = "false"
1772
+ return vars
1773
+
1774
+
1775
+ class ScreenshotTaskSchema(ShowTaskSchema):
1776
+ """
1777
+ A specialized TaskSchema for tasks that generate screenshots of files.
1778
+
1779
+ This class inherits from `ShowTaskSchema` and is specifically for tasks
1780
+ that need to open a file, generate an image, and then exit. It automatically
1781
+ sets the 'showexit' parameter to True.
1782
+ """
1783
+
1784
+ def task(self):
1785
+ """Returns the name of this task."""
1786
+ return "screenshot"
1787
+
1788
+ def setup(self):
1789
+ """
1790
+ Sets up the screenshot task, ensuring that the viewer will exit
1791
+ after the screenshot is taken.
1792
+ """
1793
+ super().setup()
1794
+ # Ensure the viewer exits after taking the screenshot
1795
+ self.set_showexit(True)
1796
+
1797
+ def get_tcl_variables(self, manifest=None):
1798
+ """
1799
+ Gets Tcl variables for the task, setting 'sc_do_screenshot' to true.
1800
+ """
1801
+ vars = super().get_tcl_variables(manifest)
1802
+ vars["sc_do_screenshot"] = "true"
1803
+ return vars
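A screenshot-capable viewer is declared the same way as a show task, just under ScreenshotTaskSchema, and its Tcl side will see sc_do_screenshot reported as "true". A small sketch with an invented layout viewer:

    # Sketch only: the class and extensions are hypothetical.
    class MyLayoutScreenshot(ScreenshotTaskSchema):
        def get_supported_show_extentions(self):
            return ["gds", "oas"]

    ScreenshotTaskSchema.register_task(MyLayoutScreenshot)
    shot = ScreenshotTaskSchema.get_task("gds")   # -> MyLayoutScreenshot instance
    print(shot.task())                            # "screenshot"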
1804
+
1805
+
1806
+ class ASICTaskSchema(TaskSchema):
1807
+ """
1808
+ A TaskSchema with helper methods for tasks in a standard ASIC flow,
1809
+ providing easy access to PDK and standard cell library information.
1810
+ """
1811
+ @property
1812
+ def mainlib(self):
1813
+ """The main standard cell library schema object."""
1814
+ mainlib = self.schema().get("asic", "mainlib")
1815
+ if not mainlib:
1816
+ raise ValueError("mainlib has not been defined in [asic,mainlib]")
1817
+ if mainlib not in self.schema().getkeys("library"):
1818
+ raise LookupError(f"{mainlib} has not been loaded")
1819
+ return self.schema().get("library", mainlib, field="schema")
1820
+
1821
+ @property
1822
+ def pdk(self):
1823
+ """The Process Design Kit (PDK) schema object."""
1824
+ pdk = self.mainlib.get("asic", "pdk")
1825
+ if not pdk:
1826
+ raise ValueError("pdk has not been defined in "
1827
+ f"[{','.join([*self.mainlib._keypath, 'asic', 'pdk'])}]")
1828
+ if pdk not in self.schema().getkeys("library"):
1829
+ raise LookupError(f"{pdk} has not been loaded")
1830
+ return self.schema().get("library", pdk, field="schema")
1831
+
1832
+ def set_asic_var(self,
1833
+ key: str,
1834
+ defvalue=None,
1835
+ check_pdk: bool = True,
1836
+ require_pdk: bool = False,
1837
+ pdk_key: str = None,
1838
+ check_mainlib: bool = True,
1839
+ require_mainlib: bool = False,
1840
+ mainlib_key: str = None,
1841
+ require: bool = False):
1842
+ '''
1843
+ Set an ASIC parameter based on a prioritized lookup order.
1844
+
1845
+ This method attempts to set a parameter identified by `key` by checking
1846
+ values in a specific order:
1847
+ 1. The main library
1848
+ 2. The PDK
1849
+ 3. A provided default value (`defvalue`)
1850
+
1851
+ The first non-empty or non-None value found in this hierarchy will be
1852
+ used to set the parameter. If no value is found and `defvalue` is not
1853
+ provided, the parameter will not be set unless explicitly required.
1854
+
1855
+ Args:
1856
+ key: The string key for the parameter to be set. This key is used
1857
+ to identify the parameter within the current object (`self`)
1858
+ and, by default, within the main library and PDK.
1859
+ defvalue: An optional default value to use if the parameter is not
1860
+ found in the main library or PDK. If `None` and the parameter
1861
+ is not found, it will not be set unless `require` is True.
1862
+ check_pdk: If `True`, the method will attempt to retrieve the
1863
+ parameter from the PDK. Defaults to `True`.
1864
+ require_pdk: If `True`, the parameter *must* be defined in the PDK.
1865
+ An error will be raised if it's not found and `check_pdk` is `True`.
1866
+ Defaults to `False`.
1867
+ pdk_key: The specific key to use when looking up the parameter in the
1868
+ PDK. If `None`, `key` will be used.
1869
+ check_mainlib: If `True`, the method will attempt to retrieve the
1870
+ parameter from the main library. Defaults to `True`.
1871
+ require_mainlib: If `True`, the parameter *must* be defined in the
1872
+ main library. An error will be raised if it's not found and
1873
+ `check_mainlib` is `True`. Defaults to `False`.
1874
+ mainlib_key: The specific key to use when looking up the parameter in
1875
+ the main library. If `None`, `key` will be used.
1876
+ require: If `True`, the parameter *must* be set by this method (either
1877
+ from a source or `defvalue`). An error will be raised if it cannot
1878
+ be set. Defaults to `False`.
1879
+ '''
1880
+ check_keys = []
1881
+ if check_pdk:
1882
+ if not pdk_key:
1883
+ pdk_key = key
1884
+ if self.pdk.valid("tool", self.tool(), pdk_key):
1885
+ check_keys.append((self.pdk, ("tool", self.tool(), pdk_key)))
1886
+ if check_mainlib:
1887
+ if not mainlib_key:
1888
+ mainlib_key = key
1889
+ if self.mainlib.valid("tool", self.tool(), mainlib_key):
1890
+ check_keys.append((self.mainlib, ("tool", self.tool(), mainlib_key)))
1891
+ check_keys.append((self, ("var", key)))
1892
+
1893
+ if require_pdk:
1894
+ self.add_required_key(self.pdk, "tool", self.tool(), pdk_key)
1895
+ if require_mainlib:
1896
+ self.add_required_key(self.mainlib, "tool", self.tool(), mainlib_key)
1897
+ if require or defvalue is not None:
1898
+ self.add_required_key(self, "var", key)
1899
+
1900
+ if self.get("var", key, field=None).is_set(self.step, self.index):
1901
+ return
1902
+
1903
+ for obj, keypath in reversed(check_keys):
1904
+ if not obj.valid(*keypath):
1905
+ continue
1906
+
1907
+ value = obj.get(*keypath)
1908
+ if isinstance(value, (list, set, tuple)):
1909
+ if not value:
1910
+ continue
1911
+ else:
1912
+ if value is None:
1913
+ continue
1914
+ self.add_required_key(obj, *keypath)
1915
+ self.add_required_key(self, "var", key)
1916
+ return self.set("var", key, value)
1917
+ if defvalue is not None:
1918
+ return self.set("var", key, defvalue)
1919
+
1920
+
1269
1921
  class ToolSchema(NamedSchema):
1922
+ """
1923
+ A schema class that defines the parameters for a single tool, which can
1924
+ contain multiple tasks.
1925
+ """
1270
1926
  def __init__(self, name=None):
1271
1927
  super().__init__()
1272
1928
  self.set_name(name)
1273
-
1274
1929
  schema_tool(self)
1275
-
1276
1930
  schema = EditableSchema(self)
1277
- schema.insert("task", "default", TaskSchema(None))
1931
+ schema.insert("task", "default", TaskSchema())
1932
+
1933
+ @classmethod
1934
+ def _getdict_type(cls) -> str:
1935
+ """Returns the metadata for getdict."""
1936
+ return ToolSchema.__name__
1278
1937
 
1279
1938
 
1280
1939
  ###########################################################################
@@ -1289,6 +1948,14 @@ class ToolSchemaTmp(NamedSchema):
1289
1948
  schema = EditableSchema(self)
1290
1949
  schema.insert("task", "default", TaskSchemaTmp())
1291
1950
 
1951
+ @classmethod
1952
+ def _getdict_type(cls) -> str:
1953
+ """
1954
+ Returns the metadata for getdict.
1955
+ """
1956
+
1957
+ return ToolSchemaTmp.__name__
1958
+
1292
1959
 
1293
1960
  class TaskSchemaTmp(TaskSchema):
1294
1961
  def __init__(self):
@@ -1302,28 +1969,26 @@ class TaskSchemaTmp(TaskSchema):
1302
1969
  return None
1303
1970
 
1304
1971
  def __tool_task_modules(self):
1305
- step, index = self.node()
1306
1972
  flow = self._TaskSchema__chip.get('option', 'flow')
1307
1973
  return \
1308
- self._TaskSchema__chip._get_tool_module(step, index, flow=flow), \
1309
- self._TaskSchema__chip._get_task_module(step, index, flow=flow)
1974
+ self._TaskSchema__chip._get_tool_module(self.step, self.index, flow=flow), \
1975
+ self._TaskSchema__chip._get_task_module(self.step, self.index, flow=flow)
1310
1976
 
1311
1977
  @contextlib.contextmanager
1312
1978
  def __in_step_index(self):
1313
1979
  prev_step, prev_index = self._TaskSchema__chip.get('arg', 'step'), \
1314
1980
  self._TaskSchema__chip.get('arg', 'index')
1315
- step, index = self.node()
1316
- self._TaskSchema__chip.set('arg', 'step', step)
1317
- self._TaskSchema__chip.set('arg', 'index', index)
1981
+ self._TaskSchema__chip.set('arg', 'step', self.step)
1982
+ self._TaskSchema__chip.set('arg', 'index', self.index)
1318
1983
  yield
1319
1984
  self._TaskSchema__chip.set('arg', 'step', prev_step)
1320
1985
  self._TaskSchema__chip.set('arg', 'index', prev_index)
1321
1986
 
1322
1987
  def tool(self):
1323
- return self.schema("flow").get(*self.node(), 'tool')
1988
+ return self.schema("flow").get(self.step, self.index, 'tool')
1324
1989
 
1325
1990
  def task(self):
1326
- return self.schema("flow").get(*self.node(), 'task')
1991
+ return self.schema("flow").get(self.step, self.index, 'task')
1327
1992
 
1328
1993
  def get_exe(self):
1329
1994
  if self.tool() == "execute" and self.task() == "exec_input":
@@ -1339,7 +2004,7 @@ class TaskSchemaTmp(TaskSchema):
1339
2004
  _, task = self.__tool_task_modules()
1340
2005
  method = self.__module_func("_gather_outputs", [task])
1341
2006
  if method:
1342
- return method(self._TaskSchema__chip, *self.node())
2007
+ return method(self._TaskSchema__chip, self.step, self.index)
1343
2008
  return TaskSchema.get_output_files(self)
1344
2009
 
1345
2010
  def parse_version(self, stdout):
@@ -1368,15 +2033,16 @@ class TaskSchemaTmp(TaskSchema):
1368
2033
  if method:
1369
2034
  with self.__in_step_index():
1370
2035
  ret = method(self._TaskSchema__chip)
1371
- return ret
1372
- return TaskSchema.setup(self)
2036
+ if ret:
2037
+ raise TaskSkip(ret)
2038
+ TaskSchema.setup(self)
1373
2039
 
1374
2040
  def select_input_nodes(self):
1375
2041
  _, task = self.__tool_task_modules()
1376
2042
  method = self.__module_func("_select_inputs", [task])
1377
2043
  if method:
1378
2044
  with self.__in_step_index():
1379
- ret = method(self._TaskSchema__chip, *self.node())
2045
+ ret = method(self._TaskSchema__chip, self.step, self.index)
1380
2046
  return ret
1381
2047
  return TaskSchema.select_input_nodes(self)
1382
2048
 
@@ -1386,8 +2052,9 @@ class TaskSchemaTmp(TaskSchema):
1386
2052
  if method:
1387
2053
  with self.__in_step_index():
1388
2054
  ret = method(self._TaskSchema__chip)
1389
- return ret
1390
- return TaskSchema.pre_process(self)
2055
+ if ret:
2056
+ raise TaskSkip(ret)
2057
+ TaskSchema.pre_process(self)
1391
2058
 
1392
2059
  def runtime_options(self):
1393
2060
  tool, task = self.__tool_task_modules()
@@ -1404,9 +2071,8 @@ class TaskSchemaTmp(TaskSchema):
1404
2071
  method = self.__module_func("run", [task])
1405
2072
  if method:
1406
2073
  # Handle logger stdout suppression if quiet
1407
- step, index = self.node()
1408
2074
  stdout_handler_level = self._TaskSchema__chip._logger_console.level
1409
- if self._TaskSchema__chip.get('option', 'quiet', step=step, index=index):
2075
+ if self._TaskSchema__chip.get('option', 'quiet', step=self.step, index=self.index):
1410
2076
  self._TaskSchema__chip._logger_console.setLevel(logging.CRITICAL)
1411
2077
 
1412
2078
  with self.__in_step_index():
@@ -1422,15 +2088,20 @@ class TaskSchemaTmp(TaskSchema):
1422
2088
  method = self.__module_func("post_process", [task])
1423
2089
  if method:
1424
2090
  with self.__in_step_index():
1425
- ret = method(self._TaskSchema__chip)
1426
- return ret
1427
- return TaskSchema.post_process(self)
2091
+ method(self._TaskSchema__chip)
2092
+ TaskSchema.post_process(self)
1428
2093
 
1429
2094
 
1430
2095
  ###########################################################################
1431
2096
  # Tool Setup
1432
2097
  ###########################################################################
1433
2098
  def schema_tool(schema):
2099
+ """
2100
+ Defines the standard parameters for a tool within the schema.
2101
+
2102
+ Args:
2103
+ schema (Schema): The schema object to add the parameters to.
2104
+ """
1434
2105
  schema = EditableSchema(schema)
1435
2106
 
1436
2107
  schema.insert(
@@ -1562,6 +2233,12 @@ def schema_tool(schema):
1562
2233
 
1563
2234
 
1564
2235
  def schema_task(schema):
2236
+ """
2237
+ Defines the standard parameters for a task within the schema.
2238
+
2239
+ Args:
2240
+ schema (Schema): The schema object to add the parameters to.
2241
+ """
1565
2242
  schema = EditableSchema(schema)
1566
2243
 
1567
2244
  schema.insert(