siliconcompiler 0.34.1__py3-none-any.whl → 0.34.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (129)
  1. siliconcompiler/__init__.py +23 -4
  2. siliconcompiler/__main__.py +1 -7
  3. siliconcompiler/_metadata.py +1 -1
  4. siliconcompiler/apps/_common.py +104 -23
  5. siliconcompiler/apps/sc.py +4 -8
  6. siliconcompiler/apps/sc_dashboard.py +6 -4
  7. siliconcompiler/apps/sc_install.py +10 -6
  8. siliconcompiler/apps/sc_issue.py +7 -5
  9. siliconcompiler/apps/sc_remote.py +1 -1
  10. siliconcompiler/apps/sc_server.py +9 -14
  11. siliconcompiler/apps/sc_show.py +7 -6
  12. siliconcompiler/apps/smake.py +130 -94
  13. siliconcompiler/apps/utils/replay.py +4 -7
  14. siliconcompiler/apps/utils/summarize.py +3 -5
  15. siliconcompiler/asic.py +420 -0
  16. siliconcompiler/checklist.py +25 -2
  17. siliconcompiler/cmdlineschema.py +534 -0
  18. siliconcompiler/constraints/__init__.py +17 -0
  19. siliconcompiler/constraints/asic_component.py +378 -0
  20. siliconcompiler/constraints/asic_floorplan.py +449 -0
  21. siliconcompiler/constraints/asic_pins.py +489 -0
  22. siliconcompiler/constraints/asic_timing.py +517 -0
  23. siliconcompiler/core.py +10 -35
  24. siliconcompiler/data/templates/tcl/manifest.tcl.j2 +8 -0
  25. siliconcompiler/dependencyschema.py +96 -202
  26. siliconcompiler/design.py +327 -241
  27. siliconcompiler/filesetschema.py +250 -0
  28. siliconcompiler/flowgraph.py +298 -106
  29. siliconcompiler/fpga.py +124 -1
  30. siliconcompiler/library.py +331 -0
  31. siliconcompiler/metric.py +327 -92
  32. siliconcompiler/metrics/__init__.py +7 -0
  33. siliconcompiler/metrics/asic.py +245 -0
  34. siliconcompiler/metrics/fpga.py +220 -0
  35. siliconcompiler/package/__init__.py +391 -67
  36. siliconcompiler/package/git.py +92 -16
  37. siliconcompiler/package/github.py +114 -22
  38. siliconcompiler/package/https.py +79 -16
  39. siliconcompiler/packageschema.py +341 -16
  40. siliconcompiler/pathschema.py +255 -0
  41. siliconcompiler/pdk.py +566 -1
  42. siliconcompiler/project.py +1460 -0
  43. siliconcompiler/record.py +38 -1
  44. siliconcompiler/remote/__init__.py +5 -2
  45. siliconcompiler/remote/client.py +11 -6
  46. siliconcompiler/remote/schema.py +5 -23
  47. siliconcompiler/remote/server.py +41 -54
  48. siliconcompiler/report/__init__.py +3 -3
  49. siliconcompiler/report/dashboard/__init__.py +48 -14
  50. siliconcompiler/report/dashboard/cli/__init__.py +99 -21
  51. siliconcompiler/report/dashboard/cli/board.py +364 -179
  52. siliconcompiler/report/dashboard/web/__init__.py +90 -12
  53. siliconcompiler/report/dashboard/web/components/__init__.py +219 -240
  54. siliconcompiler/report/dashboard/web/components/flowgraph.py +49 -26
  55. siliconcompiler/report/dashboard/web/components/graph.py +139 -100
  56. siliconcompiler/report/dashboard/web/layouts/__init__.py +29 -1
  57. siliconcompiler/report/dashboard/web/layouts/_common.py +38 -2
  58. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph.py +39 -26
  59. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_node_tab.py +50 -50
  60. siliconcompiler/report/dashboard/web/layouts/vertical_flowgraph_sac_tabs.py +49 -46
  61. siliconcompiler/report/dashboard/web/state.py +141 -14
  62. siliconcompiler/report/dashboard/web/utils/__init__.py +79 -16
  63. siliconcompiler/report/dashboard/web/utils/file_utils.py +74 -11
  64. siliconcompiler/report/dashboard/web/viewer.py +25 -1
  65. siliconcompiler/report/report.py +5 -2
  66. siliconcompiler/report/summary_image.py +29 -11
  67. siliconcompiler/scheduler/__init__.py +9 -1
  68. siliconcompiler/scheduler/docker.py +81 -4
  69. siliconcompiler/scheduler/run_node.py +37 -20
  70. siliconcompiler/scheduler/scheduler.py +211 -36
  71. siliconcompiler/scheduler/schedulernode.py +394 -60
  72. siliconcompiler/scheduler/send_messages.py +77 -29
  73. siliconcompiler/scheduler/slurm.py +76 -12
  74. siliconcompiler/scheduler/taskscheduler.py +142 -21
  75. siliconcompiler/schema/__init__.py +0 -4
  76. siliconcompiler/schema/baseschema.py +338 -59
  77. siliconcompiler/schema/editableschema.py +14 -6
  78. siliconcompiler/schema/journal.py +28 -17
  79. siliconcompiler/schema/namedschema.py +22 -14
  80. siliconcompiler/schema/parameter.py +89 -28
  81. siliconcompiler/schema/parametertype.py +2 -0
  82. siliconcompiler/schema/parametervalue.py +258 -15
  83. siliconcompiler/schema/safeschema.py +25 -2
  84. siliconcompiler/schema/schema_cfg.py +23 -19
  85. siliconcompiler/schema/utils.py +2 -2
  86. siliconcompiler/schema_obj.py +24 -5
  87. siliconcompiler/tool.py +1131 -265
  88. siliconcompiler/tools/bambu/__init__.py +41 -0
  89. siliconcompiler/tools/builtin/concatenate.py +2 -2
  90. siliconcompiler/tools/builtin/minimum.py +2 -1
  91. siliconcompiler/tools/builtin/mux.py +2 -1
  92. siliconcompiler/tools/builtin/nop.py +2 -1
  93. siliconcompiler/tools/builtin/verify.py +2 -1
  94. siliconcompiler/tools/klayout/__init__.py +95 -0
  95. siliconcompiler/tools/openroad/__init__.py +289 -0
  96. siliconcompiler/tools/openroad/scripts/apr/preamble.tcl +3 -0
  97. siliconcompiler/tools/openroad/scripts/apr/sc_detailed_route.tcl +7 -2
  98. siliconcompiler/tools/openroad/scripts/apr/sc_global_route.tcl +8 -4
  99. siliconcompiler/tools/openroad/scripts/apr/sc_init_floorplan.tcl +9 -5
  100. siliconcompiler/tools/openroad/scripts/common/write_images.tcl +5 -1
  101. siliconcompiler/tools/slang/__init__.py +1 -1
  102. siliconcompiler/tools/slang/elaborate.py +2 -1
  103. siliconcompiler/tools/vivado/scripts/sc_run.tcl +1 -1
  104. siliconcompiler/tools/vivado/scripts/sc_syn_fpga.tcl +8 -1
  105. siliconcompiler/tools/vivado/syn_fpga.py +6 -0
  106. siliconcompiler/tools/vivado/vivado.py +35 -2
  107. siliconcompiler/tools/vpr/__init__.py +150 -0
  108. siliconcompiler/tools/yosys/__init__.py +369 -1
  109. siliconcompiler/tools/yosys/scripts/procs.tcl +0 -1
  110. siliconcompiler/toolscripts/_tools.json +5 -10
  111. siliconcompiler/utils/__init__.py +66 -0
  112. siliconcompiler/utils/flowgraph.py +2 -2
  113. siliconcompiler/utils/issue.py +2 -1
  114. siliconcompiler/utils/logging.py +14 -0
  115. siliconcompiler/utils/multiprocessing.py +256 -0
  116. siliconcompiler/utils/showtools.py +10 -0
  117. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/METADATA +6 -6
  118. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/RECORD +122 -115
  119. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/entry_points.txt +3 -0
  120. siliconcompiler/schema/cmdlineschema.py +0 -250
  121. siliconcompiler/schema/packageschema.py +0 -101
  122. siliconcompiler/toolscripts/rhel8/install-slang.sh +0 -40
  123. siliconcompiler/toolscripts/rhel9/install-slang.sh +0 -40
  124. siliconcompiler/toolscripts/ubuntu20/install-slang.sh +0 -47
  125. siliconcompiler/toolscripts/ubuntu22/install-slang.sh +0 -37
  126. siliconcompiler/toolscripts/ubuntu24/install-slang.sh +0 -37
  127. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/WHEEL +0 -0
  128. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/licenses/LICENSE +0 -0
  129. {siliconcompiler-0.34.1.dist-info → siliconcompiler-0.34.3.dist-info}/top_level.txt +0 -0
siliconcompiler/scheduler/schedulernode.py
@@ -1,34 +1,65 @@
 import contextlib
+import glob
 import logging
 import os
 import shutil
 import sys
+import tarfile
 import time

 import os.path

 from logging.handlers import QueueHandler

+from typing import List
+
 from siliconcompiler import utils, sc_open
 from siliconcompiler import Schema
 from siliconcompiler import NodeStatus
 from siliconcompiler.utils.logging import get_console_formatter, SCInRunLoggerFormatter
 from siliconcompiler.schema import utils as schema_utils

-from siliconcompiler.tools._common import input_file_node_name, record_metric
-
+from siliconcompiler.package import Resolver
 from siliconcompiler.record import RecordTime, RecordTool
 from siliconcompiler.schema import Journal
 from siliconcompiler.scheduler import send_messages


 class SchedulerNode:
+    """
+    A class for managing and executing a single node in the compilation flow graph.
+
+    This class encapsulates the state and logic required to run a specific
+    step and index, including setting up directories, handling file I/O,
+    executing the associated tool, and recording results.
+
+    """
+
     def __init__(self, chip, step, index, replay=False):
+        """
+        Initializes a SchedulerNode.
+
+        Args:
+            chip (Chip): The parent Chip object containing the schema and settings.
+            step (str): The step name in the flowgraph this node represents.
+            index (str): The index for the step this node represents.
+            replay (bool): If True, sets up the node to replay a previous run.
+
+        Raises:
+            TypeError: If 'step' or 'index' are not non-empty strings.
+        """
+        if not isinstance(step, str) or step == "":
+            raise TypeError("step must be a string with a value")
+        if not isinstance(index, str) or index == "":
+            raise TypeError("index must be a string with a value")
+
         self.__step = step
         self.__index = index
         self.__chip = chip

-        self.__design = self.__chip.design
+        self.__name = self.__chip.design
+        self.__topmodule = self.__chip.top(step=step, index=index)
+        self.__topmodule_global = self.__chip.top()

         self.__job = self.__chip.get('option', 'jobname')
         self.__record_user_info = self.__chip.get("option", "track",
@@ -48,106 +79,207 @@ class SchedulerNode:
         self.__is_entry_node = (self.__step, self.__index) in \
             self.__chip.get("flowgraph", flow, field="schema").get_entry_nodes()

+        self.__cwd = self.__chip.cwd
         self.__jobworkdir = self.__chip.getworkdir(jobname=self.__job)
         self.__workdir = self.__chip.getworkdir(jobname=self.__job,
                                                 step=self.__step, index=self.__index)
         self.__manifests = {
-            "input": os.path.join(self.__workdir, "inputs", f"{self.__design}.pkg.json"),
-            "output": os.path.join(self.__workdir, "outputs", f"{self.__design}.pkg.json")
+            "input": os.path.join(self.__workdir, "inputs", f"{self.__name}.pkg.json"),
+            "output": os.path.join(self.__workdir, "outputs", f"{self.__name}.pkg.json")
         }
         self.__logs = {
             "sc": os.path.join(self.__workdir, f"sc_{self.__step}_{self.__index}.log"),
             "exe": os.path.join(self.__workdir, f"{self.__step}.log")
         }
         self.__replay_script = os.path.join(self.__workdir, "replay.sh")
+        self.__collection_path = self.__chip._getcollectdir()

         self.set_queue(None, None)
         self.__setup_schema_access()

     @contextlib.contextmanager
     def runtime(self):
+        """
+        A context manager to temporarily switch the node's active task.
+
+        This is used to ensure that API calls within a specific context
+        are directed to the correct task's schema.
+        """
         prev_task = self.__task
-        with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as runtask:
+        with self.__task.runtime(self) as runtask:
             self.__task = runtask
             yield
         self.__task = prev_task

     @staticmethod
     def init(chip):
+        """Static placeholder for future initialization logic."""
         pass

+    def switch_node(self, step: str, index: str) -> "SchedulerNode":
+        """
+        Creates a new SchedulerNode for a different step/index.
+
+        This allows for context switching to inspect or interact with other nodes
+        within the same chip context.
+
+        Args:
+            step (str): The step name of the new node.
+            index (str): The index of the new node.
+
+        Returns:
+            SchedulerNode: A new SchedulerNode instance for the specified step and index.
+        """
+        return SchedulerNode(self.__chip, step, index)
+
     @property
-    def is_local(self):
+    def is_local(self) -> bool:
+        """bool: Returns True, indicating the node runs on the local machine."""
         return True

     @property
-    def has_error(self):
+    def has_error(self) -> bool:
+        """bool: True if the node has encountered an error."""
         return self.__error

     def set_builtin(self):
+        """Flags this node as a 'builtin' node."""
         self.__builtin = True

     @property
-    def is_builtin(self):
+    def is_builtin(self) -> bool:
+        """bool: True if this node is a 'builtin' node."""
         return self.__builtin

     @property
-    def logger(self):
+    def logger(self) -> logging.Logger:
+        """logging.Logger: The logger instance for this node."""
         return self.__chip.logger

     @property
     def chip(self):
+        """Chip: The parent Chip object."""
         return self.__chip

     @property
-    def step(self):
+    def project(self):
+        """Chip: The parent Chip object (alias for 'chip')."""
+        return self.chip
+
+    @property
+    def step(self) -> str:
+        """str: The step name of this node."""
         return self.__step

     @property
-    def index(self):
+    def index(self) -> str:
+        """str: The index of this node."""
         return self.__index

     @property
-    def design(self):
-        return self.__design
+    def name(self) -> str:
+        """str: The design name associated with this node."""
+        return self.__name
+
+    @property
+    def topmodule(self) -> str:
+        """str: The top module for this specific node."""
+        return self.__topmodule

     @property
-    def workdir(self):
+    def topmodule_global(self) -> str:
+        """str: The global top module for the entire flow."""
+        return self.__topmodule_global
+
+    @property
+    def jobname(self) -> str:
+        """str: The name of the current job."""
+        return self.__job
+
+    @property
+    def project_cwd(self) -> str:
+        """str: The original current working directory where the process was launched."""
+        return self.__cwd
+
+    @property
+    def workdir(self) -> str:
+        """str: The working directory for this specific node (step/index)."""
         return self.__workdir

     @property
-    def jobworkdir(self):
+    def jobworkdir(self) -> str:
+        """str: The top-level working directory for the job."""
         return self.__jobworkdir

     @property
-    def is_replay(self):
+    def collection_dir(self) -> str:
+        """str: The directory for collected source files."""
+        return self.__collection_path
+
+    @property
+    def is_replay(self) -> bool:
+        """bool: True if this node is configured for a replay run."""
         return self.__replay

     @property
     def task(self):
+        """Task: The task object associated with this node."""
         return self.__task

     def get_manifest(self, input=False):
+        """
+        Gets the path to the input or output manifest file for this node.
+
+        Args:
+            input (bool): If True, returns the input manifest path. Otherwise,
+                returns the output manifest path.
+
+        Returns:
+            str: The absolute path to the manifest file.
+        """
         if input:
             return self.__manifests["input"]
         return self.__manifests["output"]

     def get_log(self, type="exe"):
+        """
+        Gets the path to a specific log file for this node.
+
+        Args:
+            type (str): The type of log file to retrieve ('exe' or 'sc').
+
+        Returns:
+            str: The absolute path to the log file.
+
+        Raises:
+            ValueError: If an unknown log type is requested.
+        """
         if type not in self.__logs:
             raise ValueError(f"{type} is not a log")
         return self.__logs[type]

     @property
     def replay_script(self):
+        """str: The path to the shell script for replaying this node's execution."""
         return self.__replay_script

     @property
     def threads(self):
-        with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
+        """int: The number of threads allocated for this node's task."""
+        with self.__task.runtime(self) as task:
             thread_count = task.get("threads")
         return thread_count

     def set_queue(self, pipe, queue):
+        """
+        Configures the multiprocessing queue and pipe for inter-process communication.
+
+        This is primarily used for logging from a child process back to the parent.
+
+        Args:
+            pipe: The pipe for sending data back to the parent process.
+            queue: The multiprocessing.Queue for handling log records.
+        """
         self.__pipe = pipe
         self.__queue = queue

@@ -155,6 +287,13 @@ class SchedulerNode:
         self.__setup_schema_access()

     def __setup_schema_access(self):
+        """
+        Private helper to set up direct access to schema objects.
+
+        This method initializes direct references to the schema objects for the
+        flow, task, records, and metrics associated with this node, optimizing
+        access to configuration and results.
+        """
         flow = self.__chip.get('option', 'flow')
         self.__flow = self.__chip.get("flowgraph", flow, field="schema")

@@ -165,11 +304,17 @@ class SchedulerNode:
         self.__metrics = self.__chip.get("metric", field="schema")

     def _init_run_logger(self):
+        """
+        Initializes and configures the logger for the node's execution.
+
+        This sets up the console formatter to include the step/index and redirects
+        log output to a queue if one is provided for multiprocessing.
+        """
         self.__chip._logger_console.setFormatter(
             get_console_formatter(self.__chip, True, self.__step, self.__index))
         self.logger.setLevel(
             schema_utils.translate_loglevel(self.__chip.get('option', 'loglevel',
-                                                             step=self.__step, index=self.__index)))
+                                            step=self.__step, index=self.__index)))

         if self.__queue:
             formatter = self.__chip._logger_console.formatter
179
324
  self.logger.addHandler(self.__chip._logger_console)
180
325
 
181
326
  def halt(self, msg=None):
327
+ """
328
+ Stops the node's execution due to an error.
329
+
330
+ This method logs an error message, sets the node's status to ERROR,
331
+ writes the final manifest, and exits the process.
332
+
333
+ Args:
334
+ msg (str, optional): An error message to log.
335
+ """
182
336
  if msg:
183
337
  self.logger.error(msg)
184
338
 
@@ -193,30 +347,55 @@ class SchedulerNode:
193
347
  sys.exit(1)
194
348
 
195
349
  def setup(self):
196
- with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
350
+ """
351
+ Runs the setup() method for the node's assigned task.
352
+
353
+ This method prepares the task for execution. If the task's setup()
354
+ raises a TaskSkip exception, the node is marked as SKIPPED.
355
+
356
+ Returns:
357
+ bool: False if the node was skipped, True otherwise.
358
+
359
+ Raises:
360
+ Exception: Propagates any exception from the task's setup() method.
361
+ """
362
+ from siliconcompiler.tool import TaskSkip
363
+
364
+ with self.__task.runtime(self) as task:
197
365
  # Run node setup.
198
366
  self.logger.info(f'Setting up node {self.__step}/{self.__index} with '
199
367
  f'{task.tool()}/{task.task()}')
200
- setup_ret = None
201
368
  try:
202
- setup_ret = task.setup()
369
+ task.setup()
370
+ except TaskSkip as skip:
371
+ self.logger.warning(f'Removing {self.__step}/{self.__index} due to {skip.why}')
372
+ self.__record.set('status', NodeStatus.SKIPPED,
373
+ step=self.__step, index=self.__index)
374
+ return False
203
375
  except Exception as e:
204
376
  self.logger.error(f'Failed to run setup() for {self.__step}/{self.__index} '
205
377
  f'with {task.tool()}/{task.task()}')
206
378
  raise e
207
379
 
208
- if setup_ret is not None:
209
- self.logger.warning(f'Removing {self.__step}/{self.__index} due to {setup_ret}')
210
- self.__record.set('status', NodeStatus.SKIPPED,
211
- step=self.__step, index=self.__index)
212
-
213
- return False
214
-
215
380
  return True
216
381
 
217
382
  def check_previous_run_status(self, previous_run):
383
+ """
384
+ Checks if the previous run of this node completed successfully.
385
+
386
+ Compares tool/task names and status to determine if the prior result
387
+ is valid as a starting point for an incremental build.
388
+
389
+ Args:
390
+ previous_run (SchedulerNode): The node object from a previous run
391
+ loaded from a manifest.
392
+
393
+ Returns:
394
+ bool: True if the previous run was successful and compatible,
395
+ False otherwise.
396
+ """
218
397
  # Assume modified if flow does not match
219
- if self.__flow.name() != previous_run.__flow.name():
398
+ if self.__flow.name != previous_run.__flow.name:
220
399
  self.logger.debug("Flow name changed")
221
400
  return False
222
401
 
@@ -258,6 +437,16 @@ class SchedulerNode:
258
437
  return True
259
438
 
260
439
  def check_values_changed(self, previous_run, keys):
440
+ """
441
+ Checks if any specified schema parameter values have changed.
442
+
443
+ Args:
444
+ previous_run (SchedulerNode): The node object from a previous run.
445
+ keys (set of tuples): A set of keypaths to check for changes.
446
+
447
+ Returns:
448
+ bool: True if any value has changed, False otherwise.
449
+ """
261
450
  def print_warning(key):
262
451
  self.logger.warning(f'[{",".join(key)}] in {self.__step}/{self.__index} has been '
263
452
  'modified from previous run')
@@ -283,6 +472,19 @@ class SchedulerNode:
283
472
  return False
284
473
 
285
474
  def check_files_changed(self, previous_run, previous_time, keys):
475
+ """
476
+ Checks if any specified file-based parameters have changed.
477
+
478
+ This check can be based on file hashes (if enabled) or timestamps.
479
+
480
+ Args:
481
+ previous_run (SchedulerNode): The node object from a previous run.
482
+ previous_time (float): The timestamp of the previous run's manifest.
483
+ keys (set of tuples): A set of file/dir keypaths to check.
484
+
485
+ Returns:
486
+ bool: True if any file has changed, False otherwise.
487
+ """
286
488
  use_hash = self.__hash and previous_run.__hash
287
489
 
288
490
  def print_warning(key, reason):
@@ -331,6 +533,20 @@ class SchedulerNode:
331
533
  return False
332
534
 
333
535
  def get_check_changed_keys(self):
536
+ """
537
+ Gathers all schema keys that could trigger a re-run if changed.
538
+
539
+ This includes tool options, scripts, and required inputs specified
540
+ in the task's schema.
541
+
542
+ Returns:
543
+ tuple: A tuple containing two sets: (value_keys, path_keys).
544
+ `value_keys` are keys for simple values.
545
+ `path_keys` are keys for file/directory paths.
546
+
547
+ Raises:
548
+ KeyError: If a required keypath is not found in the schema.
549
+ """
334
550
  all_keys = set()
335
551
 
336
552
  all_keys.update(self.__task.get('require'))
@@ -357,6 +573,17 @@ class SchedulerNode:
357
573
  return value_keys, path_keys
358
574
 
359
575
  def requires_run(self):
576
+ """
577
+ Determines if the node needs to be re-run.
578
+
579
+ This method performs a series of checks against the results of a
580
+ previous run (if one exists). It checks for changes in run status,
581
+ configuration parameters, and input files to decide if the node's
582
+ task can be skipped.
583
+
584
+ Returns:
585
+ bool: True if a re-run is required, False otherwise.
586
+ """
360
587
  from siliconcompiler import Chip
361
588
 
362
589
  # Load previous manifest
@@ -421,6 +648,13 @@ class SchedulerNode:
421
648
  return False
422
649
 
423
650
  def setup_input_directory(self):
651
+ """
652
+ Prepares the 'inputs/' directory for the node's execution.
653
+
654
+ This method gathers output files from all preceding nodes in the
655
+ flowgraph and links or copies them into the current node's 'inputs/'
656
+ directory. It also handles file renaming as specified by the task.
657
+ """
424
658
  in_files = set(self.__task.get('input'))
425
659
 
426
660
  for in_step, in_index in self.__record.get('inputnode',
@@ -435,11 +669,11 @@ class SchedulerNode:
435
669
  f'{output_dir}')
436
670
 
437
671
  for outfile in os.scandir(output_dir):
438
- if outfile.name == f'{self.__design}.pkg.json':
672
+ if outfile.name == f'{self.__name}.pkg.json':
439
673
  # Dont forward manifest
440
674
  continue
441
675
 
442
- new_name = input_file_node_name(outfile.name, in_step, in_index)
676
+ new_name = self.__task.compute_input_file_node_name(outfile.name, in_step, in_index)
443
677
  if self.__enforce_inputfiles:
444
678
  if outfile.name not in in_files and new_name not in in_files:
445
679
  continue
@@ -459,12 +693,16 @@ class SchedulerNode:
459
693
  f'{self.__workdir}/inputs/{new_name}')
460
694
 
461
695
  def validate(self):
462
- '''
463
- Runtime checks called from _runtask().
696
+ """
697
+ Performs pre-run validation checks.
698
+
699
+ This method ensures that all expected input files exist in the 'inputs/'
700
+ directory and that all required schema parameters have been set and can
701
+ be resolved correctly before the task is executed.
464
702
 
465
- - Make sure expected inputs exist.
466
- - Make sure all required filepaths resolve correctly.
467
- '''
703
+ Returns:
704
+ bool: True if validation passes, False otherwise.
705
+ """
468
706
  error = False
469
707
 
470
708
  required_inputs = self.__task.get('input')
@@ -491,8 +729,7 @@ class SchedulerNode:
491
729
  if param.get(field='pernode').is_never():
492
730
  check_step, check_index = None, None
493
731
 
494
- value = self.__chip.get(*keypath, step=check_step, index=check_index)
495
- if not value:
732
+ if not param.has_value(step=check_step, index=check_index):
496
733
  self.logger.error('No value set for required keypath '
497
734
  f'[{",".join(keypath)}].')
498
735
  error = True
@@ -504,7 +741,7 @@ class SchedulerNode:
504
741
  missing_ok=True,
505
742
  step=check_step, index=check_index)
506
743
 
507
- unresolved_paths = value
744
+ unresolved_paths = param.get(step=check_step, index=check_index)
508
745
  if not isinstance(abspath, list):
509
746
  abspath = [abspath]
510
747
  unresolved_paths = [unresolved_paths]
@@ -518,6 +755,7 @@ class SchedulerNode:
518
755
  return not error
519
756
 
520
757
  def summarize(self):
758
+ """Prints a post-run summary of metrics to the logger."""
521
759
  for metric in ['errors', 'warnings']:
522
760
  val = self.__metrics.get(metric, step=self.__step, index=self.__index)
523
761
  if val is not None:
@@ -527,17 +765,22 @@ class SchedulerNode:
527
765
  self.logger.info(f"Finished task in {walltime:.2f}s")
528
766
 
529
767
  def run(self):
530
- '''
531
- Private per node run method called by run().
532
-
533
- The method takes in a step string and index string to indicate what
534
- to run.
535
-
536
- Note that since _runtask occurs in its own process with a separate
537
- address space, any changes made to the `self` object will not
538
- be reflected in the parent. We rely on reading/writing the chip manifest
539
- to the filesystem to communicate updates between processes.
540
- '''
768
+ """
769
+ Executes the full lifecycle for this node.
770
+
771
+ This method orchestrates the entire process of running a node:
772
+ 1. Initializes logging and records metadata.
773
+ 2. Sets up the working directory.
774
+ 3. Determines and links inputs from previous nodes.
775
+ 4. Writes the pre-execution manifest.
776
+ 5. Validates that all inputs and parameters are ready.
777
+ 6. Calls `execute()` to run the tool.
778
+ 7. Stops journaling and returns to the original directory.
779
+
780
+ Note: Since this method may run in its own process with a separate
781
+ address space, any changes made to the schema are communicated through
782
+ reading/writing the chip manifest to the filesystem.
783
+ """
541
784
 
542
785
  # Setup logger
543
786
  self._init_run_logger()
@@ -608,13 +851,25 @@ class SchedulerNode:
608
851
  journal.stop()
609
852
 
610
853
  if self.__pipe:
611
- self.__pipe.send(self.__chip.get("package", field="schema").get_path_cache())
854
+ self.__pipe.send(Resolver.get_cache(self.__chip))
612
855
 
613
856
  def execute(self):
857
+ """
858
+ Handles the core tool execution logic.
859
+
860
+ This method runs the pre-processing, execution, and post-processing
861
+ steps for the node's task. It manages the tool's environment, checks
862
+ for return codes, and handles log file parsing and error reporting.
863
+ """
864
+ from siliconcompiler.tool import TaskSkip
865
+
614
866
  self.logger.info(f'Running in {self.__workdir}')
615
867
 
616
868
  try:
617
869
  self.__task.pre_process()
870
+ except TaskSkip as skip:
871
+ self.logger.warning(f'Removing {self.__step}/{self.__index} due to {skip.why}')
872
+ self.__record.set('status', NodeStatus.SKIPPED, step=self.__step, index=self.__index)
618
873
  except Exception as e:
619
874
  self.logger.error(
620
875
  f"Pre-processing failed for {self.__task.tool()}/{self.__task.task()}")
@@ -628,7 +883,7 @@ class SchedulerNode:
628
883
  required_outputs = set(self.__task.get('output'))
629
884
  in_workdir = self.__chip.getworkdir(step=in_step, index=in_index)
630
885
  for outfile in os.scandir(f"{in_workdir}/outputs"):
631
- if outfile.name == f'{self.__design}.pkg.json':
886
+ if outfile.name == f'{self.__name}.pkg.json':
632
887
  # Dont forward manifest
633
888
  continue
634
889
 
@@ -690,7 +945,7 @@ class SchedulerNode:
690
945
  loglines = logfd.read().splitlines()
691
946
  for logline in loglines[-self.__failed_log_lines:]:
692
947
  self.logger.error(logline)
693
- # No log file for pure-Python tools.
948
+ # No log file for pure-Python tools.
694
949
  msg += f' See log file {os.path.abspath(self.__logs["exe"])}'
695
950
  self.logger.warning(msg)
696
951
  self.__error = True
@@ -742,6 +997,12 @@ class SchedulerNode:
742
997
  send_messages.send(self.__chip, "end", self.__step, self.__index)
743
998
 
744
999
  def __generate_testcase(self):
1000
+ """
1001
+ Private helper to generate a test case upon failure.
1002
+
1003
+ This method packages the failing state (including manifests, inputs,
1004
+ and logs) into a compressed archive for easier debugging.
1005
+ """
745
1006
  from siliconcompiler.utils.issue import generate_testcase
746
1007
  import lambdapdk
747
1008
 
@@ -758,6 +1019,14 @@ class SchedulerNode:
758
1019
  verbose_collect=False)
759
1020
 
760
1021
  def check_logfile(self):
1022
+ """
1023
+ Parses the tool execution log file for patterns.
1024
+
1025
+ This method reads the tool's log file (e.g., 'synthesis.log') and
1026
+ uses regular expressions defined in the schema to find and count
1027
+ errors, warnings, and other specified metrics. The findings are
1028
+ recorded in the schema and printed to the console.
1029
+ """
761
1030
  if self.__record.get('status', step=self.__step, index=self.__index) == NodeStatus.SKIPPED:
762
1031
  return
763
1032
 
@@ -829,18 +1098,19 @@ class SchedulerNode:
829
1098
 
830
1099
  for metric in ("errors", "warnings"):
831
1100
  if metric in matches:
832
- errors = self.__metrics.get(metric, step=self.__step, index=self.__index)
833
- if errors is None:
834
- errors = 0
835
- errors += matches[metric]
1101
+ value = self.__metrics.get(metric, step=self.__step, index=self.__index)
1102
+ if value is None:
1103
+ value = 0
1104
+ value += matches[metric]
836
1105
 
837
1106
  sources = [os.path.basename(self.__logs["exe"])]
838
1107
  if self.__task.get('regex', metric):
839
1108
  sources.append(f'{self.__step}.{metric}')
840
1109
 
841
- record_metric(self.__chip, self.__step, self.__index, metric, errors, sources)
1110
+ self.__task.record_metric(metric, value, source_file=sources)
842
1111
 
843
1112
  def __hash_files_pre_execute(self):
1113
+ """Private helper to hash all relevant input files before execution."""
844
1114
  for task_key in ('refdir', 'prescript', 'postscript', 'script'):
845
1115
  self.__chip.hash_files('tool', self.__task.tool(), 'task', self.__task.task(), task_key,
846
1116
  step=self.__step, index=self.__index, check=False,
@@ -858,6 +1128,7 @@ class SchedulerNode:
858
1128
  check=False, allow_cache=True, verbose=False)
859
1129
 
860
1130
  def __hash_files_post_execute(self):
1131
+ """Private helper to hash all output files after execution."""
861
1132
  # hash all outputs
862
1133
  self.__chip.hash_files('tool', self.__task.tool(), 'task', self.__task.task(), 'output',
863
1134
  step=self.__step, index=self.__index, check=False, verbose=False)
@@ -876,6 +1147,12 @@ class SchedulerNode:
876
1147
  check=False, allow_cache=True, verbose=False)
877
1148
 
878
1149
  def __report_output_files(self):
1150
+ """
1151
+ Private helper to check for missing or unexpected output files.
1152
+
1153
+ Compares the files found in the 'outputs/' directory against the
1154
+ files expected by the task's schema. Reports errors if they don't match.
1155
+ """
879
1156
  if self.__task.tool() == 'builtin':
880
1157
  return
881
1158
 
@@ -912,6 +1189,16 @@ class SchedulerNode:
912
1189
  self.halt()
913
1190
 
914
1191
  def copy_from(self, source):
1192
+ """
1193
+ Imports the results of this node from a different job run.
1194
+
1195
+ This method copies the entire working directory of a node from a
1196
+ specified source job into the current job's working directory. It is
1197
+ used for resuming or branching from a previous run.
1198
+
1199
+ Args:
1200
+ source (str): The jobname of the source run to copy from.
1201
+ """
915
1202
  copy_from = self.__chip.getworkdir(jobname=source, step=self.__step, index=self.__index)
916
1203
 
917
1204
  if not os.path.exists(copy_from):
@@ -936,9 +1223,56 @@ class SchedulerNode:
936
1223
  schema = Schema.from_manifest(manifest)
937
1224
  # delete file as it might be a hard link
938
1225
  os.remove(manifest)
939
- schema.set('option', 'jobname', self.__chip.get('option', 'jobname'))
1226
+ schema.set('option', 'jobname', self.__job)
940
1227
  schema.write_manifest(manifest)
941
1228
 
942
1229
  def clean_directory(self):
1230
+ """Removes the working directory for this node."""
943
1231
  if os.path.exists(self.__workdir):
944
1232
  shutil.rmtree(self.__workdir)
1233
+
1234
+ def archive(self, tar: tarfile.TarFile, include: List[str] = None, verbose: bool = None):
1235
+ """
1236
+ Archives the node's results into a tar file.
1237
+
1238
+ By default, it archives the 'reports' and 'outputs' directories and all
1239
+ log files. The `include` argument allows for custom file selection using
1240
+ glob patterns.
1241
+
1242
+ Args:
1243
+ tar (tarfile.TarFile): The tarfile object to add files to.
1244
+ include (List[str], optional): A list of glob patterns to specify
1245
+ which files to include in the archive. Defaults to None.
1246
+ verbose (bool, optional): If True, prints archiving status messages.
1247
+ Defaults to None.
1248
+ """
1249
+ if not tar:
1250
+ return
1251
+
1252
+ if verbose:
1253
+ self.logger.info(f'Archiving {self.step}/{self.index}...')
1254
+
1255
+ def arcname(path):
1256
+ return os.path.relpath(path, self.__cwd)
1257
+
1258
+ if not os.path.isdir(self.__workdir):
1259
+ if self.project.get('record', 'status', step=self.step, index=self.index) != \
1260
+ NodeStatus.SKIPPED:
1261
+ self.logger.error(f'Unable to archive {self.step}/{self.index} '
1262
+ 'due to missing node directory')
1263
+ return
1264
+
1265
+ if include:
1266
+ if isinstance(include, str):
1267
+ include = [include]
1268
+ for pattern in include:
1269
+ for path in glob.iglob(os.path.join(self.__workdir, pattern)):
1270
+ tar.add(path, arcname=arcname(path))
1271
+ else:
1272
+ for folder in ('reports', 'outputs'):
1273
+ path = os.path.join(self.__workdir, folder)
1274
+ tar.add(path, arcname=arcname(path))
1275
+
1276
+ for logfile in self.__logs.values():
1277
+ if os.path.isfile(logfile):
1278
+ tar.add(logfile, arcname=arcname(logfile))
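Usage note (not part of the released diff): a minimal sketch of driving the archive() method added above, assuming a Chip object with a configured flowgraph that has already been run; the design name 'heartbeat' and the 'syn'/'0' node are hypothetical, and the import path follows the schedulernode.py file listed in the table above.

# Illustrative sketch only, based on the signatures visible in this diff.
import tarfile

from siliconcompiler import Chip
from siliconcompiler.scheduler.schedulernode import SchedulerNode

chip = Chip('heartbeat')                       # hypothetical design
# ... configure the flowgraph and run the flow before archiving ...

node = SchedulerNode(chip, 'syn', '0')         # step/index must be non-empty strings
with tarfile.open('syn0_results.tgz', 'w:gz') as tar:
    # Default selection is reports/, outputs/ and the node's log files;
    # pass include=['*.log'] (glob patterns) to narrow what is archived.
    node.archive(tar, verbose=True)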