siliconcompiler 0.34.2__py3-none-any.whl → 0.34.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (121)
  1. siliconcompiler/__init__.py +12 -5
  2. siliconcompiler/__main__.py +1 -7
  3. siliconcompiler/_metadata.py +1 -1
  4. siliconcompiler/apps/_common.py +104 -23
  5. siliconcompiler/apps/sc.py +4 -8
  6. siliconcompiler/apps/sc_dashboard.py +6 -4
  7. siliconcompiler/apps/sc_install.py +10 -6
  8. siliconcompiler/apps/sc_issue.py +7 -5
  9. siliconcompiler/apps/sc_remote.py +1 -1
  10. siliconcompiler/apps/sc_server.py +9 -14
  11. siliconcompiler/apps/sc_show.py +6 -5
  12. siliconcompiler/apps/smake.py +130 -94
  13. siliconcompiler/apps/utils/replay.py +4 -7
  14. siliconcompiler/apps/utils/summarize.py +3 -5
  15. siliconcompiler/asic.py +420 -0
  16. siliconcompiler/checklist.py +25 -2
  17. siliconcompiler/cmdlineschema.py +534 -0
  18. siliconcompiler/constraints/asic_component.py +2 -2
  19. siliconcompiler/constraints/asic_pins.py +2 -2
  20. siliconcompiler/constraints/asic_timing.py +3 -3
  21. siliconcompiler/core.py +7 -32
  22. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +8 -0
  23. siliconcompiler/dependencyschema.py +89 -31
  24. siliconcompiler/design.py +176 -207
  25. siliconcompiler/filesetschema.py +250 -0
  26. siliconcompiler/flowgraph.py +274 -95
  27. siliconcompiler/fpga.py +124 -1
  28. siliconcompiler/library.py +218 -20
  29. siliconcompiler/metric.py +233 -20
  30. siliconcompiler/package/__init__.py +271 -50
  31. siliconcompiler/package/git.py +92 -16
  32. siliconcompiler/package/github.py +108 -12
  33. siliconcompiler/package/https.py +79 -16
  34. siliconcompiler/packageschema.py +88 -7
  35. siliconcompiler/pathschema.py +31 -2
  36. siliconcompiler/pdk.py +566 -1
  37. siliconcompiler/project.py +1095 -94
  38. siliconcompiler/record.py +38 -1
  39. siliconcompiler/remote/__init__.py +5 -2
  40. siliconcompiler/remote/client.py +11 -6
  41. siliconcompiler/remote/schema.py +5 -23
  42. siliconcompiler/remote/server.py +41 -54
  43. siliconcompiler/report/__init__.py +3 -3
  44. siliconcompiler/report/dashboard/__init__.py +48 -14
  45. siliconcompiler/report/dashboard/cli/__init__.py +99 -21
  46. siliconcompiler/report/dashboard/cli/board.py +364 -179
  47. siliconcompiler/report/dashboard/web/__init__.py +90 -12
  48. siliconcompiler/report/dashboard/web/components/__init__.py +219 -240
  49. siliconcompiler/report/dashboard/web/components/flowgraph.py +49 -26
  50. siliconcompiler/report/dashboard/web/components/graph.py +139 -100
  51. siliconcompiler/report/dashboard/web/layouts/__init__.py +29 -1
  52. siliconcompiler/report/dashboard/web/layouts/_common.py +38 -2
  53. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph.py +39 -26
  54. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_node_tab.py +50 -50
  55. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_sac_tabs.py +49 -46
  56. siliconcompiler/report/dashboard/web/state.py +141 -14
  57. siliconcompiler/report/dashboard/web/utils/__init__.py +79 -16
  58. siliconcompiler/report/dashboard/web/utils/file_utils.py +74 -11
  59. siliconcompiler/report/dashboard/web/viewer.py +25 -1
  60. siliconcompiler/report/report.py +5 -2
  61. siliconcompiler/report/summary_image.py +29 -11
  62. siliconcompiler/scheduler/__init__.py +9 -1
  63. siliconcompiler/scheduler/docker.py +79 -1
  64. siliconcompiler/scheduler/run_node.py +35 -19
  65. siliconcompiler/scheduler/scheduler.py +208 -24
  66. siliconcompiler/scheduler/schedulernode.py +372 -46
  67. siliconcompiler/scheduler/send_messages.py +77 -29
  68. siliconcompiler/scheduler/slurm.py +76 -12
  69. siliconcompiler/scheduler/taskscheduler.py +140 -20
  70. siliconcompiler/schema/__init__.py +0 -2
  71. siliconcompiler/schema/baseschema.py +194 -38
  72. siliconcompiler/schema/journal.py +7 -4
  73. siliconcompiler/schema/namedschema.py +16 -10
  74. siliconcompiler/schema/parameter.py +55 -9
  75. siliconcompiler/schema/parametervalue.py +60 -0
  76. siliconcompiler/schema/safeschema.py +25 -2
  77. siliconcompiler/schema/schema_cfg.py +5 -5
  78. siliconcompiler/schema/utils.py +2 -2
  79. siliconcompiler/schema_obj.py +20 -3
  80. siliconcompiler/tool.py +979 -302
  81. siliconcompiler/tools/bambu/__init__.py +41 -0
  82. siliconcompiler/tools/builtin/concatenate.py +2 -2
  83. siliconcompiler/tools/builtin/minimum.py +2 -1
  84. siliconcompiler/tools/builtin/mux.py +2 -1
  85. siliconcompiler/tools/builtin/nop.py +2 -1
  86. siliconcompiler/tools/builtin/verify.py +2 -1
  87. siliconcompiler/tools/klayout/__init__.py +95 -0
  88. siliconcompiler/tools/openroad/__init__.py +289 -0
  89. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +3 -0
  90. siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +7 -2
  91. siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +8 -4
  92. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +9 -5
  93. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +5 -1
  94. siliconcompiler/tools/slang/__init__.py +1 -1
  95. siliconcompiler/tools/slang/elaborate.py +2 -1
  96. siliconcompiler/tools/vivado/scripts/sc_run.tcl +1 -1
  97. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +8 -1
  98. siliconcompiler/tools/vivado/syn_fpga.py +6 -0
  99. siliconcompiler/tools/vivado/vivado.py +35 -2
  100. siliconcompiler/tools/vpr/__init__.py +150 -0
  101. siliconcompiler/tools/yosys/__init__.py +369 -1
  102. siliconcompiler/tools/yosys/scripts/procs.tcl +0 -1
  103. siliconcompiler/toolscripts/_tools.json +5 -10
  104. siliconcompiler/utils/__init__.py +66 -0
  105. siliconcompiler/utils/flowgraph.py +2 -2
  106. siliconcompiler/utils/issue.py +2 -1
  107. siliconcompiler/utils/logging.py +14 -0
  108. siliconcompiler/utils/multiprocessing.py +256 -0
  109. siliconcompiler/utils/showtools.py +10 -0
  110. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/METADATA +5 -5
  111. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/RECORD +115 -118
  112. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/entry_points.txt +3 -0
  113. siliconcompiler/schema/cmdlineschema.py +0 -250
  114. siliconcompiler/toolscripts/rhel8/install-slang.sh +0 -40
  115. siliconcompiler/toolscripts/rhel9/install-slang.sh +0 -40
  116. siliconcompiler/toolscripts/ubuntu20/install-slang.sh +0 -47
  117. siliconcompiler/toolscripts/ubuntu22/install-slang.sh +0 -37
  118. siliconcompiler/toolscripts/ubuntu24/install-slang.sh +0 -37
  119. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/WHEEL +0 -0
  120. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/licenses/LICENSE +0 -0
  121. {siliconcompiler-0.34.2.dist-info → siliconcompiler-0.34.3.dist-info}/top_level.txt +0 -0
siliconcompiler/scheduler/schedulernode.py
@@ -1,14 +1,18 @@
 import contextlib
+import glob
 import logging
 import os
 import shutil
 import sys
+import tarfile
 import time
 
 import os.path
 
 from logging.handlers import QueueHandler
 
+from typing import List
+
 from siliconcompiler import utils, sc_open
 from siliconcompiler import Schema
 from siliconcompiler import NodeStatus
@@ -22,13 +26,40 @@ from siliconcompiler.scheduler import send_messages
 
 
 class SchedulerNode:
+    """
+    A class for managing and executing a single node in the compilation flow graph.
+
+    This class encapsulates the state and logic required to run a specific
+    step and index, including setting up directories, handling file I/O,
+    executing the associated tool, and recording results.
+
+    """
+
     def __init__(self, chip, step, index, replay=False):
+        """
+        Initializes a SchedulerNode.
+
+        Args:
+            chip (Chip): The parent Chip object containing the schema and settings.
+            step (str): The step name in the flowgraph this node represents.
+            index (str): The index for the step this node represents.
+            replay (bool): If True, sets up the node to replay a previous run.
+
+        Raises:
+            TypeError: If 'step' or 'index' are not non-empty strings.
+        """
+        if not isinstance(step, str) or step == "":
+            raise TypeError("step must be a string with a value")
+        if not isinstance(index, str) or index == "":
+            raise TypeError("index must be a string with a value")
+
         self.__step = step
         self.__index = index
         self.__chip = chip
 
         self.__name = self.__chip.design
         self.__topmodule = self.__chip.top(step=step, index=index)
+        self.__topmodule_global = self.__chip.top()
 
         self.__job = self.__chip.get('option', 'jobname')
         self.__record_user_info = self.__chip.get("option", "track",
@@ -48,6 +79,7 @@ class SchedulerNode:
         self.__is_entry_node = (self.__step, self.__index) in \
             self.__chip.get("flowgraph", flow, field="schema").get_entry_nodes()
 
+        self.__cwd = self.__chip.cwd
         self.__jobworkdir = self.__chip.getworkdir(jobname=self.__job)
         self.__workdir = self.__chip.getworkdir(jobname=self.__job,
                                                 step=self.__step, index=self.__index)
@@ -60,102 +92,194 @@ class SchedulerNode:
             "exe": os.path.join(self.__workdir, f"{self.__step}.log")
         }
         self.__replay_script = os.path.join(self.__workdir, "replay.sh")
+        self.__collection_path = self.__chip._getcollectdir()
 
         self.set_queue(None, None)
         self.__setup_schema_access()
 
     @contextlib.contextmanager
     def runtime(self):
+        """
+        A context manager to temporarily switch the node's active task.
+
+        This is used to ensure that API calls within a specific context
+        are directed to the correct task's schema.
+        """
         prev_task = self.__task
-        with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as runtask:
+        with self.__task.runtime(self) as runtask:
             self.__task = runtask
             yield
         self.__task = prev_task
 
     @staticmethod
     def init(chip):
+        """Static placeholder for future initialization logic."""
         pass
 
+    def switch_node(self, step: str, index: str) -> "SchedulerNode":
+        """
+        Creates a new SchedulerNode for a different step/index.
+
+        This allows for context switching to inspect or interact with other nodes
+        within the same chip context.
+
+        Args:
+            step (str): The step name of the new node.
+            index (str): The index of the new node.
+
+        Returns:
+            SchedulerNode: A new SchedulerNode instance for the specified step and index.
+        """
+        return SchedulerNode(self.__chip, step, index)
+
     @property
-    def is_local(self):
+    def is_local(self) -> bool:
+        """bool: Returns True, indicating the node runs on the local machine."""
         return True
 
     @property
-    def has_error(self):
+    def has_error(self) -> bool:
+        """bool: True if the node has encountered an error."""
         return self.__error
 
     def set_builtin(self):
+        """Flags this node as a 'builtin' node."""
         self.__builtin = True
 
     @property
-    def is_builtin(self):
+    def is_builtin(self) -> bool:
+        """bool: True if this node is a 'builtin' node."""
         return self.__builtin
 
     @property
-    def logger(self):
+    def logger(self) -> logging.Logger:
+        """logging.Logger: The logger instance for this node."""
         return self.__chip.logger
 
     @property
     def chip(self):
+        """Chip: The parent Chip object."""
         return self.__chip
 
     @property
-    def step(self):
+    def project(self):
+        """Chip: The parent Chip object (alias for 'chip')."""
+        return self.chip
+
+    @property
+    def step(self) -> str:
+        """str: The step name of this node."""
         return self.__step
 
     @property
-    def index(self):
+    def index(self) -> str:
+        """str: The index of this node."""
         return self.__index
 
     @property
-    def name(self):
+    def name(self) -> str:
+        """str: The design name associated with this node."""
         return self.__name
 
     @property
-    def topmodule(self):
+    def topmodule(self) -> str:
+        """str: The top module for this specific node."""
         return self.__topmodule
 
     @property
-    def jobname(self):
+    def topmodule_global(self) -> str:
+        """str: The global top module for the entire flow."""
+        return self.__topmodule_global
+
+    @property
+    def jobname(self) -> str:
+        """str: The name of the current job."""
         return self.__job
 
     @property
-    def workdir(self):
+    def project_cwd(self) -> str:
+        """str: The original current working directory where the process was launched."""
+        return self.__cwd
+
+    @property
+    def workdir(self) -> str:
+        """str: The working directory for this specific node (step/index)."""
         return self.__workdir
 
     @property
-    def jobworkdir(self):
+    def jobworkdir(self) -> str:
+        """str: The top-level working directory for the job."""
         return self.__jobworkdir
 
     @property
-    def is_replay(self):
+    def collection_dir(self) -> str:
+        """str: The directory for collected source files."""
+        return self.__collection_path
+
+    @property
+    def is_replay(self) -> bool:
+        """bool: True if this node is configured for a replay run."""
         return self.__replay
 
     @property
     def task(self):
+        """Task: The task object associated with this node."""
         return self.__task
 
     def get_manifest(self, input=False):
+        """
+        Gets the path to the input or output manifest file for this node.
+
+        Args:
+            input (bool): If True, returns the input manifest path. Otherwise,
+                returns the output manifest path.
+
+        Returns:
+            str: The absolute path to the manifest file.
+        """
         if input:
             return self.__manifests["input"]
         return self.__manifests["output"]
 
     def get_log(self, type="exe"):
+        """
+        Gets the path to a specific log file for this node.
+
+        Args:
+            type (str): The type of log file to retrieve ('exe' or 'sc').
+
+        Returns:
+            str: The absolute path to the log file.
+
+        Raises:
+            ValueError: If an unknown log type is requested.
+        """
         if type not in self.__logs:
             raise ValueError(f"{type} is not a log")
         return self.__logs[type]
 
     @property
     def replay_script(self):
+        """str: The path to the shell script for replaying this node's execution."""
         return self.__replay_script
 
     @property
     def threads(self):
-        with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
+        """int: The number of threads allocated for this node's task."""
+        with self.__task.runtime(self) as task:
             thread_count = task.get("threads")
         return thread_count
 
     def set_queue(self, pipe, queue):
+        """
+        Configures the multiprocessing queue and pipe for inter-process communication.
+
+        This is primarily used for logging from a child process back to the parent.
+
+        Args:
+            pipe: The pipe for sending data back to the parent process.
+            queue: The multiprocessing.Queue for handling log records.
+        """
         self.__pipe = pipe
         self.__queue = queue
 
@@ -163,6 +287,13 @@ class SchedulerNode:
         self.__setup_schema_access()
 
     def __setup_schema_access(self):
+        """
+        Private helper to set up direct access to schema objects.
+
+        This method initializes direct references to the schema objects for the
+        flow, task, records, and metrics associated with this node, optimizing
+        access to configuration and results.
+        """
         flow = self.__chip.get('option', 'flow')
         self.__flow = self.__chip.get("flowgraph", flow, field="schema")
 
@@ -173,11 +304,17 @@ class SchedulerNode:
         self.__metrics = self.__chip.get("metric", field="schema")
 
     def _init_run_logger(self):
+        """
+        Initializes and configures the logger for the node's execution.
+
+        This sets up the console formatter to include the step/index and redirects
+        log output to a queue if one is provided for multiprocessing.
+        """
         self.__chip._logger_console.setFormatter(
             get_console_formatter(self.__chip, True, self.__step, self.__index))
         self.logger.setLevel(
             schema_utils.translate_loglevel(self.__chip.get('option', 'loglevel',
-                                            step=self.__step, index=self.__index)))
+                                                            step=self.__step, index=self.__index)))
 
         if self.__queue:
             formatter = self.__chip._logger_console.formatter
@@ -187,6 +324,15 @@ class SchedulerNode:
             self.logger.addHandler(self.__chip._logger_console)
 
     def halt(self, msg=None):
+        """
+        Stops the node's execution due to an error.
+
+        This method logs an error message, sets the node's status to ERROR,
+        writes the final manifest, and exits the process.
+
+        Args:
+            msg (str, optional): An error message to log.
+        """
         if msg:
             self.logger.error(msg)
 
@@ -201,30 +347,55 @@ class SchedulerNode:
         sys.exit(1)
 
     def setup(self):
-        with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
+        """
+        Runs the setup() method for the node's assigned task.
+
+        This method prepares the task for execution. If the task's setup()
+        raises a TaskSkip exception, the node is marked as SKIPPED.
+
+        Returns:
+            bool: False if the node was skipped, True otherwise.
+
+        Raises:
+            Exception: Propagates any exception from the task's setup() method.
+        """
+        from siliconcompiler.tool import TaskSkip
+
+        with self.__task.runtime(self) as task:
             # Run node setup.
             self.logger.info(f'Setting up node {self.__step}/{self.__index} with '
                              f'{task.tool()}/{task.task()}')
-            setup_ret = None
             try:
-                setup_ret = task.setup()
+                task.setup()
+            except TaskSkip as skip:
+                self.logger.warning(f'Removing {self.__step}/{self.__index} due to {skip.why}')
+                self.__record.set('status', NodeStatus.SKIPPED,
+                                  step=self.__step, index=self.__index)
+                return False
             except Exception as e:
                 self.logger.error(f'Failed to run setup() for {self.__step}/{self.__index} '
                                   f'with {task.tool()}/{task.task()}')
                 raise e
 
-            if setup_ret is not None:
-                self.logger.warning(f'Removing {self.__step}/{self.__index} due to {setup_ret}')
-                self.__record.set('status', NodeStatus.SKIPPED,
-                                  step=self.__step, index=self.__index)
-
-                return False
-
         return True
 
     def check_previous_run_status(self, previous_run):
+        """
+        Checks if the previous run of this node completed successfully.
+
+        Compares tool/task names and status to determine if the prior result
+        is valid as a starting point for an incremental build.
+
+        Args:
+            previous_run (SchedulerNode): The node object from a previous run
+                loaded from a manifest.
+
+        Returns:
+            bool: True if the previous run was successful and compatible,
+                False otherwise.
+        """
         # Assume modified if flow does not match
-        if self.__flow.name() != previous_run.__flow.name():
+        if self.__flow.name != previous_run.__flow.name:
             self.logger.debug("Flow name changed")
             return False
 
@@ -266,6 +437,16 @@ class SchedulerNode:
         return True
 
     def check_values_changed(self, previous_run, keys):
+        """
+        Checks if any specified schema parameter values have changed.
+
+        Args:
+            previous_run (SchedulerNode): The node object from a previous run.
+            keys (set of tuples): A set of keypaths to check for changes.
+
+        Returns:
+            bool: True if any value has changed, False otherwise.
+        """
         def print_warning(key):
             self.logger.warning(f'[{",".join(key)}] in {self.__step}/{self.__index} has been '
                                 'modified from previous run')
@@ -291,6 +472,19 @@ class SchedulerNode:
         return False
 
     def check_files_changed(self, previous_run, previous_time, keys):
+        """
+        Checks if any specified file-based parameters have changed.
+
+        This check can be based on file hashes (if enabled) or timestamps.
+
+        Args:
+            previous_run (SchedulerNode): The node object from a previous run.
+            previous_time (float): The timestamp of the previous run's manifest.
+            keys (set of tuples): A set of file/dir keypaths to check.
+
+        Returns:
+            bool: True if any file has changed, False otherwise.
+        """
         use_hash = self.__hash and previous_run.__hash
 
         def print_warning(key, reason):
@@ -339,6 +533,20 @@ class SchedulerNode:
         return False
 
     def get_check_changed_keys(self):
+        """
+        Gathers all schema keys that could trigger a re-run if changed.
+
+        This includes tool options, scripts, and required inputs specified
+        in the task's schema.
+
+        Returns:
+            tuple: A tuple containing two sets: (value_keys, path_keys).
+                `value_keys` are keys for simple values.
+                `path_keys` are keys for file/directory paths.
+
+        Raises:
+            KeyError: If a required keypath is not found in the schema.
+        """
         all_keys = set()
 
         all_keys.update(self.__task.get('require'))
@@ -365,6 +573,17 @@ class SchedulerNode:
         return value_keys, path_keys
 
     def requires_run(self):
+        """
+        Determines if the node needs to be re-run.
+
+        This method performs a series of checks against the results of a
+        previous run (if one exists). It checks for changes in run status,
+        configuration parameters, and input files to decide if the node's
+        task can be skipped.
+
+        Returns:
+            bool: True if a re-run is required, False otherwise.
+        """
         from siliconcompiler import Chip
 
         # Load previous manifest
@@ -429,6 +648,13 @@ class SchedulerNode:
         return False
 
     def setup_input_directory(self):
+        """
+        Prepares the 'inputs/' directory for the node's execution.
+
+        This method gathers output files from all preceding nodes in the
+        flowgraph and links or copies them into the current node's 'inputs/'
+        directory. It also handles file renaming as specified by the task.
+        """
         in_files = set(self.__task.get('input'))
 
         for in_step, in_index in self.__record.get('inputnode',
@@ -467,12 +693,16 @@ class SchedulerNode:
                                  f'{self.__workdir}/inputs/{new_name}')
 
     def validate(self):
-        '''
-        Runtime checks called from _runtask().
+        """
+        Performs pre-run validation checks.
 
-        - Make sure expected inputs exist.
-        - Make sure all required filepaths resolve correctly.
-        '''
+        This method ensures that all expected input files exist in the 'inputs/'
+        directory and that all required schema parameters have been set and can
+        be resolved correctly before the task is executed.
+
+        Returns:
+            bool: True if validation passes, False otherwise.
+        """
         error = False
 
         required_inputs = self.__task.get('input')
@@ -499,8 +729,7 @@ class SchedulerNode:
             if param.get(field='pernode').is_never():
                 check_step, check_index = None, None
 
-            value = self.__chip.get(*keypath, step=check_step, index=check_index)
-            if not value:
+            if not param.has_value(step=check_step, index=check_index):
                 self.logger.error('No value set for required keypath '
                                   f'[{",".join(keypath)}].')
                 error = True
@@ -512,7 +741,7 @@ class SchedulerNode:
                                              missing_ok=True,
                                              step=check_step, index=check_index)
 
-            unresolved_paths = value
+            unresolved_paths = param.get(step=check_step, index=check_index)
             if not isinstance(abspath, list):
                 abspath = [abspath]
                 unresolved_paths = [unresolved_paths]
@@ -526,6 +755,7 @@ class SchedulerNode:
         return not error
 
     def summarize(self):
+        """Prints a post-run summary of metrics to the logger."""
         for metric in ['errors', 'warnings']:
             val = self.__metrics.get(metric, step=self.__step, index=self.__index)
             if val is not None:
@@ -535,17 +765,22 @@ class SchedulerNode:
         self.logger.info(f"Finished task in {walltime:.2f}s")
 
     def run(self):
-        '''
-        Private per node run method called by run().
-
-        The method takes in a step string and index string to indicate what
-        to run.
-
-        Note that since _runtask occurs in its own process with a separate
-        address space, any changes made to the `self` object will not
-        be reflected in the parent. We rely on reading/writing the chip manifest
-        to the filesystem to communicate updates between processes.
-        '''
+        """
+        Executes the full lifecycle for this node.
+
+        This method orchestrates the entire process of running a node:
+        1. Initializes logging and records metadata.
+        2. Sets up the working directory.
+        3. Determines and links inputs from previous nodes.
+        4. Writes the pre-execution manifest.
+        5. Validates that all inputs and parameters are ready.
+        6. Calls `execute()` to run the tool.
+        7. Stops journaling and returns to the original directory.
+
+        Note: Since this method may run in its own process with a separate
+        address space, any changes made to the schema are communicated through
+        reading/writing the chip manifest to the filesystem.
+        """
 
         # Setup logger
         self._init_run_logger()
@@ -619,10 +854,22 @@ class SchedulerNode:
             self.__pipe.send(Resolver.get_cache(self.__chip))
 
     def execute(self):
+        """
+        Handles the core tool execution logic.
+
+        This method runs the pre-processing, execution, and post-processing
+        steps for the node's task. It manages the tool's environment, checks
+        for return codes, and handles log file parsing and error reporting.
+        """
+        from siliconcompiler.tool import TaskSkip
+
         self.logger.info(f'Running in {self.__workdir}')
 
         try:
             self.__task.pre_process()
+        except TaskSkip as skip:
+            self.logger.warning(f'Removing {self.__step}/{self.__index} due to {skip.why}')
+            self.__record.set('status', NodeStatus.SKIPPED, step=self.__step, index=self.__index)
         except Exception as e:
             self.logger.error(
                 f"Pre-processing failed for {self.__task.tool()}/{self.__task.task()}")
@@ -698,7 +945,7 @@ class SchedulerNode:
                 loglines = logfd.read().splitlines()
                 for logline in loglines[-self.__failed_log_lines:]:
                     self.logger.error(logline)
-                # No log file for pure-Python tools.
+            # No log file for pure-Python tools.
             msg += f' See log file {os.path.abspath(self.__logs["exe"])}'
             self.logger.warning(msg)
             self.__error = True
@@ -750,6 +997,12 @@ class SchedulerNode:
         send_messages.send(self.__chip, "end", self.__step, self.__index)
 
     def __generate_testcase(self):
+        """
+        Private helper to generate a test case upon failure.
+
+        This method packages the failing state (including manifests, inputs,
+        and logs) into a compressed archive for easier debugging.
+        """
         from siliconcompiler.utils.issue import generate_testcase
         import lambdapdk
 
@@ -766,6 +1019,14 @@ class SchedulerNode:
                           verbose_collect=False)
 
     def check_logfile(self):
+        """
+        Parses the tool execution log file for patterns.
+
+        This method reads the tool's log file (e.g., 'synthesis.log') and
+        uses regular expressions defined in the schema to find and count
+        errors, warnings, and other specified metrics. The findings are
+        recorded in the schema and printed to the console.
+        """
         if self.__record.get('status', step=self.__step, index=self.__index) == NodeStatus.SKIPPED:
             return
 
@@ -849,6 +1110,7 @@ class SchedulerNode:
                 self.__task.record_metric(metric, value, source_file=sources)
 
     def __hash_files_pre_execute(self):
+        """Private helper to hash all relevant input files before execution."""
         for task_key in ('refdir', 'prescript', 'postscript', 'script'):
             self.__chip.hash_files('tool', self.__task.tool(), 'task', self.__task.task(), task_key,
                                    step=self.__step, index=self.__index, check=False,
@@ -866,6 +1128,7 @@ class SchedulerNode:
                                check=False, allow_cache=True, verbose=False)
 
     def __hash_files_post_execute(self):
+        """Private helper to hash all output files after execution."""
         # hash all outputs
         self.__chip.hash_files('tool', self.__task.tool(), 'task', self.__task.task(), 'output',
                                step=self.__step, index=self.__index, check=False, verbose=False)
@@ -884,6 +1147,12 @@ class SchedulerNode:
                                check=False, allow_cache=True, verbose=False)
 
     def __report_output_files(self):
+        """
+        Private helper to check for missing or unexpected output files.
+
+        Compares the files found in the 'outputs/' directory against the
+        files expected by the task's schema. Reports errors if they don't match.
+        """
         if self.__task.tool() == 'builtin':
             return
 
@@ -920,6 +1189,16 @@ class SchedulerNode:
             self.halt()
 
     def copy_from(self, source):
+        """
+        Imports the results of this node from a different job run.
+
+        This method copies the entire working directory of a node from a
+        specified source job into the current job's working directory. It is
+        used for resuming or branching from a previous run.
+
+        Args:
+            source (str): The jobname of the source run to copy from.
+        """
         copy_from = self.__chip.getworkdir(jobname=source, step=self.__step, index=self.__index)
 
         if not os.path.exists(copy_from):
@@ -948,5 +1227,52 @@ class SchedulerNode:
             schema.write_manifest(manifest)
 
     def clean_directory(self):
+        """Removes the working directory for this node."""
         if os.path.exists(self.__workdir):
             shutil.rmtree(self.__workdir)
+
+    def archive(self, tar: tarfile.TarFile, include: List[str] = None, verbose: bool = None):
+        """
+        Archives the node's results into a tar file.
+
+        By default, it archives the 'reports' and 'outputs' directories and all
+        log files. The `include` argument allows for custom file selection using
+        glob patterns.
+
+        Args:
+            tar (tarfile.TarFile): The tarfile object to add files to.
+            include (List[str], optional): A list of glob patterns to specify
+                which files to include in the archive. Defaults to None.
+            verbose (bool, optional): If True, prints archiving status messages.
+                Defaults to None.
+        """
+        if not tar:
+            return
+
+        if verbose:
+            self.logger.info(f'Archiving {self.step}/{self.index}...')
+
+        def arcname(path):
+            return os.path.relpath(path, self.__cwd)
+
+        if not os.path.isdir(self.__workdir):
+            if self.project.get('record', 'status', step=self.step, index=self.index) != \
+                    NodeStatus.SKIPPED:
+                self.logger.error(f'Unable to archive {self.step}/{self.index} '
+                                  'due to missing node directory')
+            return
+
+        if include:
+            if isinstance(include, str):
+                include = [include]
+            for pattern in include:
+                for path in glob.iglob(os.path.join(self.__workdir, pattern)):
+                    tar.add(path, arcname=arcname(path))
+        else:
+            for folder in ('reports', 'outputs'):
+                path = os.path.join(self.__workdir, folder)
+                tar.add(path, arcname=arcname(path))
+
+        for logfile in self.__logs.values():
+            if os.path.isfile(logfile):
+                tar.add(logfile, arcname=arcname(logfile))
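
Two notable additions in this diff are the new SchedulerNode.archive() method and the switch from return-value-based skipping to the TaskSkip exception in setup()/execute(). The sketches below are not taken from the package; they only reuse names that appear in the diff above (SchedulerNode, archive(), TaskSkip, skip.why). The design name, flow configuration, step/index values, output filename, and the task class are assumptions, and the node is assumed to have already been run so its working directory exists.

# Hedged usage sketch for SchedulerNode.archive() (new in 0.34.3).
# Only the constructor and archive() signature come from the diff.
import tarfile

from siliconcompiler import Chip
from siliconcompiler.scheduler.schedulernode import SchedulerNode

chip = Chip('mydesign')                 # placeholder design name
# ... flow/target configuration elided; a flow must be configured before a
# SchedulerNode can resolve its flowgraph, task, and working directory.

node = SchedulerNode(chip, 'syn', '0')  # step and index must be non-empty strings

# Pack this node's results into a gzip tarball. With include=None the method
# adds the node's 'reports' and 'outputs' directories plus its log files;
# here glob patterns narrow the selection to reports and logs only.
with tarfile.open('syn0_results.tgz', 'w:gz') as tar:
    node.archive(tar, include=['reports/*', '*.log'], verbose=True)

For the skip path, a task-side sketch under the same caveats: the diff only shows the scheduler-side handlers, so the task class, its setup() hook, and the TaskSkip constructor argument are assumed.

# Hedged task-side sketch of the TaskSkip flow handled in setup()/execute().
from siliconcompiler.tool import TaskSkip

class ExampleTask:                      # placeholder task implementation
    def __init__(self, input_files):
        self.input_files = input_files  # placeholder state

    def setup(self):
        if not self.input_files:
            # SchedulerNode.setup() catches this, logs a warning built from
            # skip.why, and marks the node SKIPPED instead of failing the run.
            raise TaskSkip('no inputs to process')  # constructor argument assumed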