siliconcompiler 0.34.0__py3-none-any.whl → 0.34.2__py3-none-any.whl

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. The information is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (114):
  1. siliconcompiler/__init__.py +14 -2
  2. siliconcompiler/_metadata.py +1 -1
  3. siliconcompiler/apps/_common.py +1 -1
  4. siliconcompiler/apps/sc.py +1 -1
  5. siliconcompiler/apps/sc_issue.py +1 -1
  6. siliconcompiler/apps/sc_remote.py +3 -3
  7. siliconcompiler/apps/sc_show.py +3 -3
  8. siliconcompiler/apps/utils/replay.py +4 -4
  9. siliconcompiler/checklist.py +203 -2
  10. siliconcompiler/constraints/__init__.py +17 -0
  11. siliconcompiler/constraints/asic_component.py +378 -0
  12. siliconcompiler/constraints/asic_floorplan.py +449 -0
  13. siliconcompiler/constraints/asic_pins.py +489 -0
  14. siliconcompiler/constraints/asic_timing.py +517 -0
  15. siliconcompiler/core.py +31 -249
  16. siliconcompiler/data/templates/email/general.j2 +3 -3
  17. siliconcompiler/data/templates/email/summary.j2 +1 -1
  18. siliconcompiler/data/templates/issue/README.txt +1 -1
  19. siliconcompiler/data/templates/report/sc_report.j2 +7 -7
  20. siliconcompiler/dependencyschema.py +10 -174
  21. siliconcompiler/design.py +325 -114
  22. siliconcompiler/flowgraph.py +63 -15
  23. siliconcompiler/library.py +133 -0
  24. siliconcompiler/metric.py +94 -72
  25. siliconcompiler/metrics/__init__.py +7 -0
  26. siliconcompiler/metrics/asic.py +245 -0
  27. siliconcompiler/metrics/fpga.py +220 -0
  28. siliconcompiler/optimizer/vizier.py +2 -2
  29. siliconcompiler/package/__init__.py +138 -35
  30. siliconcompiler/package/github.py +6 -10
  31. siliconcompiler/packageschema.py +256 -12
  32. siliconcompiler/pathschema.py +226 -0
  33. siliconcompiler/pdk.py +5 -5
  34. siliconcompiler/project.py +459 -0
  35. siliconcompiler/remote/client.py +18 -12
  36. siliconcompiler/remote/server.py +2 -2
  37. siliconcompiler/report/dashboard/cli/__init__.py +6 -6
  38. siliconcompiler/report/dashboard/cli/board.py +3 -3
  39. siliconcompiler/report/dashboard/web/components/__init__.py +5 -5
  40. siliconcompiler/report/dashboard/web/components/flowgraph.py +4 -4
  41. siliconcompiler/report/dashboard/web/components/graph.py +2 -2
  42. siliconcompiler/report/dashboard/web/state.py +1 -1
  43. siliconcompiler/report/dashboard/web/utils/__init__.py +5 -5
  44. siliconcompiler/report/html_report.py +1 -1
  45. siliconcompiler/report/report.py +4 -4
  46. siliconcompiler/report/summary_table.py +2 -2
  47. siliconcompiler/report/utils.py +5 -5
  48. siliconcompiler/scheduler/docker.py +4 -10
  49. siliconcompiler/scheduler/run_node.py +4 -8
  50. siliconcompiler/scheduler/scheduler.py +18 -24
  51. siliconcompiler/scheduler/schedulernode.py +161 -143
  52. siliconcompiler/scheduler/send_messages.py +3 -3
  53. siliconcompiler/scheduler/slurm.py +5 -3
  54. siliconcompiler/scheduler/taskscheduler.py +10 -8
  55. siliconcompiler/schema/__init__.py +0 -2
  56. siliconcompiler/schema/baseschema.py +148 -26
  57. siliconcompiler/schema/editableschema.py +14 -6
  58. siliconcompiler/schema/journal.py +23 -15
  59. siliconcompiler/schema/namedschema.py +30 -4
  60. siliconcompiler/schema/parameter.py +34 -19
  61. siliconcompiler/schema/parametertype.py +2 -0
  62. siliconcompiler/schema/parametervalue.py +198 -15
  63. siliconcompiler/schema/schema_cfg.py +18 -14
  64. siliconcompiler/schema_obj.py +5 -3
  65. siliconcompiler/tool.py +591 -179
  66. siliconcompiler/tools/__init__.py +2 -0
  67. siliconcompiler/tools/builtin/_common.py +5 -5
  68. siliconcompiler/tools/builtin/concatenate.py +5 -5
  69. siliconcompiler/tools/builtin/minimum.py +4 -4
  70. siliconcompiler/tools/builtin/mux.py +4 -4
  71. siliconcompiler/tools/builtin/nop.py +4 -4
  72. siliconcompiler/tools/builtin/verify.py +7 -7
  73. siliconcompiler/tools/execute/exec_input.py +1 -1
  74. siliconcompiler/tools/genfasm/genfasm.py +1 -6
  75. siliconcompiler/tools/openroad/_apr.py +5 -1
  76. siliconcompiler/tools/openroad/antenna_repair.py +1 -1
  77. siliconcompiler/tools/openroad/macro_placement.py +1 -1
  78. siliconcompiler/tools/openroad/power_grid.py +1 -1
  79. siliconcompiler/tools/openroad/scripts/common/procs.tcl +5 -0
  80. siliconcompiler/tools/opensta/timing.py +26 -3
  81. siliconcompiler/tools/slang/__init__.py +2 -2
  82. siliconcompiler/tools/surfer/__init__.py +0 -0
  83. siliconcompiler/tools/surfer/show.py +53 -0
  84. siliconcompiler/tools/surfer/surfer.py +30 -0
  85. siliconcompiler/tools/vpr/route.py +27 -14
  86. siliconcompiler/tools/vpr/vpr.py +23 -6
  87. siliconcompiler/tools/yosys/__init__.py +1 -1
  88. siliconcompiler/tools/yosys/scripts/procs.tcl +143 -0
  89. siliconcompiler/tools/yosys/{sc_synth_asic.tcl → scripts/sc_synth_asic.tcl} +4 -0
  90. siliconcompiler/tools/yosys/{sc_synth_fpga.tcl → scripts/sc_synth_fpga.tcl} +24 -77
  91. siliconcompiler/tools/yosys/syn_fpga.py +14 -0
  92. siliconcompiler/toolscripts/_tools.json +9 -13
  93. siliconcompiler/toolscripts/rhel9/install-vpr.sh +0 -2
  94. siliconcompiler/toolscripts/ubuntu22/install-surfer.sh +33 -0
  95. siliconcompiler/toolscripts/ubuntu24/install-surfer.sh +33 -0
  96. siliconcompiler/utils/__init__.py +2 -1
  97. siliconcompiler/utils/flowgraph.py +24 -23
  98. siliconcompiler/utils/issue.py +23 -29
  99. siliconcompiler/utils/logging.py +35 -6
  100. siliconcompiler/utils/showtools.py +6 -1
  101. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/METADATA +15 -25
  102. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/RECORD +109 -97
  103. siliconcompiler/schema/packageschema.py +0 -101
  104. siliconcompiler/tools/yosys/procs.tcl +0 -71
  105. siliconcompiler/toolscripts/rhel9/install-yosys-parmys.sh +0 -68
  106. siliconcompiler/toolscripts/ubuntu22/install-yosys-parmys.sh +0 -68
  107. siliconcompiler/toolscripts/ubuntu24/install-yosys-parmys.sh +0 -68
  108. /siliconcompiler/tools/yosys/{sc_lec.tcl → scripts/sc_lec.tcl} +0 -0
  109. /siliconcompiler/tools/yosys/{sc_screenshot.tcl → scripts/sc_screenshot.tcl} +0 -0
  110. /siliconcompiler/tools/yosys/{syn_strategies.tcl → scripts/syn_strategies.tcl} +0 -0
  111. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/WHEEL +0 -0
  112. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/entry_points.txt +0 -0
  113. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/licenses/LICENSE +0 -0
  114. {siliconcompiler-0.34.0.dist-info → siliconcompiler-0.34.2.dist-info}/top_level.txt +0 -0
siliconcompiler/scheduler/schedulernode.py
@@ -1,3 +1,4 @@
+ import contextlib
  import logging
  import os
  import shutil
@@ -11,9 +12,10 @@ from logging.handlers import QueueHandler
  from siliconcompiler import utils, sc_open
  from siliconcompiler import Schema
  from siliconcompiler import NodeStatus
+ from siliconcompiler.utils.logging import get_console_formatter, SCInRunLoggerFormatter
+ from siliconcompiler.schema import utils as schema_utils

- from siliconcompiler.tools._common import input_file_node_name, record_metric
-
+ from siliconcompiler.package import Resolver
  from siliconcompiler.record import RecordTime, RecordTool
  from siliconcompiler.schema import Journal
  from siliconcompiler.scheduler import send_messages
@@ -25,7 +27,8 @@ class SchedulerNode:
  self.__index = index
  self.__chip = chip

- self.__design = self.__chip.design
+ self.__name = self.__chip.design
+ self.__topmodule = self.__chip.top(step=step, index=index)

  self.__job = self.__chip.get('option', 'jobname')
  self.__record_user_info = self.__chip.get("option", "track",
@@ -49,22 +52,25 @@ class SchedulerNode:
  self.__workdir = self.__chip.getworkdir(jobname=self.__job,
  step=self.__step, index=self.__index)
  self.__manifests = {
- "input": os.path.join(self.__workdir, "inputs", f"{self.__design}.pkg.json"),
- "output": os.path.join(self.__workdir, "outputs", f"{self.__design}.pkg.json")
+ "input": os.path.join(self.__workdir, "inputs", f"{self.__name}.pkg.json"),
+ "output": os.path.join(self.__workdir, "outputs", f"{self.__name}.pkg.json")
  }
  self.__logs = {
- "sc": os.path.join(self.__workdir, f"sc_{self.__step}{self.__index}.log"),
+ "sc": os.path.join(self.__workdir, f"sc_{self.__step}_{self.__index}.log"),
  "exe": os.path.join(self.__workdir, f"{self.__step}.log")
  }
  self.__replay_script = os.path.join(self.__workdir, "replay.sh")

  self.set_queue(None, None)
- self.init_state()
-
- def init_state(self, assign_runtime=False):
  self.__setup_schema_access()
- if assign_runtime:
- self.__task.set_runtime(self.__chip, step=self.__step, index=self.__index)
+
+ @contextlib.contextmanager
+ def runtime(self):
+ prev_task = self.__task
+ with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as runtask:
+ self.__task = runtask
+ yield
+ self.__task = prev_task

  @staticmethod
  def init(chip):
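
Note on the hunk above: runtime() replaces the old paired set_runtime(...)/set_runtime(None) calls with a context manager that swaps the active task object in for the duration of a with-block and restores it on exit. A minimal, self-contained sketch of that swap-and-restore pattern (hypothetical names, not SiliconCompiler's API; the try/finally is an added safeguard, not taken from the diff):

    import contextlib

    class Node:
        def __init__(self, task):
            self._task = task  # the "design-time" task object

        @contextlib.contextmanager
        def runtime(self):
            prev = self._task
            try:
                # Swap in a runtime view of the task for the duration of the block.
                self._task = dict(prev, runtime=True)
                yield self._task
            finally:
                # Restore the original object even if the body raises.
                self._task = prev

    # The swapped object is only visible inside the with-block.
    node = Node({"threads": 4})
    with node.runtime() as task:
        assert task["runtime"] is True
    assert node._task == {"threads": 4}
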
@@ -102,8 +108,16 @@ class SchedulerNode:
  return self.__index

  @property
- def design(self):
- return self.__design
+ def name(self):
+ return self.__name
+
+ @property
+ def topmodule(self):
+ return self.__topmodule
+
+ @property
+ def jobname(self):
+ return self.__job

  @property
  def workdir(self):
@@ -137,24 +151,40 @@ class SchedulerNode:

  @property
  def threads(self):
- self.__task.set_runtime(self.__chip, step=self.__step, index=self.__index)
- thread_count = self.__task.get("task", self.__task.task(), "threads",
- step=self.__step, index=self.__index)
- self.__task.set_runtime(None)
+ with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
+ thread_count = task.get("threads")
  return thread_count

  def set_queue(self, pipe, queue):
  self.__pipe = pipe
  self.__queue = queue

+ # Reinit
+ self.__setup_schema_access()
+
  def __setup_schema_access(self):
  flow = self.__chip.get('option', 'flow')
  self.__flow = self.__chip.get("flowgraph", flow, field="schema")

  tool = self.__flow.get(self.__step, self.__index, 'tool')
- self.__task = self.__chip.schema.get("tool", tool, field="schema")
- self.__record = self.__chip.schema.get("record", field="schema")
- self.__metrics = self.__chip.schema.get("metric", field="schema")
+ task = self.__flow.get(self.__step, self.__index, 'task')
+ self.__task = self.__chip.get("tool", tool, "task", task, field="schema")
+ self.__record = self.__chip.get("record", field="schema")
+ self.__metrics = self.__chip.get("metric", field="schema")
+
+ def _init_run_logger(self):
+ self.__chip._logger_console.setFormatter(
+ get_console_formatter(self.__chip, True, self.__step, self.__index))
+ self.logger.setLevel(
+ schema_utils.translate_loglevel(self.__chip.get('option', 'loglevel',
+ step=self.__step, index=self.__index)))
+
+ if self.__queue:
+ formatter = self.__chip._logger_console.formatter
+ self.logger.removeHandler(self.__chip._logger_console)
+ self.__chip._logger_console = QueueHandler(self.__queue)
+ self.__chip._logger_console.setFormatter(formatter)
+ self.logger.addHandler(self.__chip._logger_console)

  def halt(self, msg=None):
  if msg:
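
Note on the hunk above: when a multiprocessing queue has been handed to the node, _init_run_logger() swaps the console handler for a logging.handlers.QueueHandler so records emitted in the child process are shipped back to the parent. A minimal stand-alone sketch of that standard-library pattern (illustrative only; the names and process wiring here are assumptions, not SiliconCompiler code):

    import logging
    import multiprocessing
    from logging.handlers import QueueHandler, QueueListener

    def node_worker(queue):
        # Child process: route records through the queue instead of stderr.
        log = logging.getLogger("sc-node")
        log.setLevel(logging.INFO)
        log.addHandler(QueueHandler(queue))
        log.info("step/index running in a child process")

    if __name__ == "__main__":
        queue = multiprocessing.Queue()
        console = logging.StreamHandler()
        console.setFormatter(logging.Formatter("%(levelname)s | %(message)s"))
        # Parent process: drain the queue and emit records on the real handler.
        listener = QueueListener(queue, console)
        listener.start()
        proc = multiprocessing.Process(target=node_worker, args=(queue,))
        proc.start()
        proc.join()
        listener.stop()
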
@@ -164,36 +194,33 @@ class SchedulerNode:
  try:
  self.__chip.schema.write_manifest(self.__manifests["output"])
  except FileNotFoundError:
- self.logger.error(f"Failed to write manifest for {self.__step}{self.__index}.")
+ self.logger.error(f"Failed to write manifest for {self.__step}/{self.__index}.")

- self.logger.error(f"Halting {self.__step}{self.__index} due to errors.")
+ self.logger.error(f"Halting {self.__step}/{self.__index} due to errors.")
  send_messages.send(self.__chip, "fail", self.__step, self.__index)
  sys.exit(1)

  def setup(self):
- self.__task.set_runtime(self.__chip, step=self.__step, index=self.__index)
-
- # Run node setup.
- self.logger.info(f'Setting up node {self.__step}{self.__index} with '
- f'{self.__task.tool()}/{self.__task.task()}')
- setup_ret = None
- try:
- setup_ret = self.__task.setup()
- except Exception as e:
- self.logger.error(f'Failed to run setup() for {self.__step}{self.__index} '
- f'with {self.__task.tool()}/{self.__task.task()}')
- self.__task.set_runtime(None)
- raise e
-
- self.__task.set_runtime(None)
+ with self.__task.runtime(self.__chip, step=self.__step, index=self.__index) as task:
+ # Run node setup.
+ self.logger.info(f'Setting up node {self.__step}/{self.__index} with '
+ f'{task.tool()}/{task.task()}')
+ setup_ret = None
+ try:
+ setup_ret = task.setup()
+ except Exception as e:
+ self.logger.error(f'Failed to run setup() for {self.__step}/{self.__index} '
+ f'with {task.tool()}/{task.task()}')
+ raise e

- if setup_ret is not None:
- self.logger.warning(f'Removing {self.__step}{self.__index} due to {setup_ret}')
- self.__record.set('status', NodeStatus.SKIPPED, step=self.__step, index=self.__index)
+ if setup_ret is not None:
+ self.logger.warning(f'Removing {self.__step}/{self.__index} due to {setup_ret}')
+ self.__record.set('status', NodeStatus.SKIPPED,
+ step=self.__step, index=self.__index)

- return False
+ return False

- return True
+ return True

  def check_previous_run_status(self, previous_run):
  # Assume modified if flow does not match
@@ -230,7 +257,7 @@
  self.logger.setLevel(log_level)
  if set(previous_run.__chip.get("record", "inputnode",
  step=self.__step, index=self.__index)) != set(sel_inputs):
- self.logger.warning(f'inputs to {self.__step}{self.__index} has been modified from '
+ self.logger.warning(f'inputs to {self.__step}/{self.__index} has been modified from '
  'previous run')
  return False

@@ -240,7 +267,7 @@

  def check_values_changed(self, previous_run, keys):
  def print_warning(key):
- self.logger.warning(f'[{",".join(key)}] in {self.__step}{self.__index} has been '
+ self.logger.warning(f'[{",".join(key)}] in {self.__step}/{self.__index} has been '
  'modified from previous run')

  for key in sorted(keys):
@@ -267,7 +294,7 @@
  use_hash = self.__hash and previous_run.__hash

  def print_warning(key, reason):
- self.logger.warning(f'[{",".join(key)}] ({reason}) in {self.__step}{self.__index} has '
+ self.logger.warning(f'[{",".join(key)}] ({reason}) in {self.__step}/{self.__index} has '
  'been modified from previous run')

  def get_file_time(path):
@@ -314,8 +341,7 @@
  def get_check_changed_keys(self):
  all_keys = set()

- all_keys.update(self.__task.get('task', self.__task.task(), 'require',
- step=self.__step, index=self.__index))
+ all_keys.update(self.__task.get('require'))

  tool_task_prefix = ('tool', self.__task.tool(), 'task', self.__task.task())
  for key in ('option', 'threads', 'prescript', 'postscript', 'refdir', 'script',):
@@ -353,7 +379,6 @@
  self.logger.debug("Input manifest failed to load")
  return True
  previous_node = SchedulerNode(chip, self.__step, self.__index)
- previous_node.init_state(assign_runtime=True)
  else:
  # No manifest found so assume rerun is needed
  self.logger.debug("Previous run did not generate input manifest")
@@ -368,62 +393,61 @@
  self.logger.debug("Output manifest failed to load")
  return True
  previous_node_end = SchedulerNode(chip, self.__step, self.__index)
- previous_node_end.init_state(assign_runtime=True)
  else:
  # No manifest found so assume rerun is needed
  self.logger.debug("Previous run did not generate output manifest")
  return True

- self.init_state(assign_runtime=True)
-
- if not self.check_previous_run_status(previous_node_end):
- self.__task.set_runtime(None)
- self.logger.debug("Previous run state failed")
- return True
+ with self.runtime():
+ if previous_node_end:
+ with previous_node_end.runtime():
+ if not self.check_previous_run_status(previous_node_end):
+ self.logger.debug("Previous run state failed")
+ return True

- # Generate key paths to check
- try:
- value_keys, path_keys = self.get_check_changed_keys()
- previous_value_keys, previous_path_keys = previous_node.get_check_changed_keys()
- value_keys.update(previous_value_keys)
- path_keys.update(previous_path_keys)
- except KeyError:
- self.__task.set_runtime(None)
- self.logger.debug("Failed to acquire keys")
- return True
+ if previous_node:
+ with previous_node.runtime():
+ # Generate key paths to check
+ try:
+ value_keys, path_keys = self.get_check_changed_keys()
+ previous_value_keys, previous_path_keys = \
+ previous_node.get_check_changed_keys()
+ value_keys.update(previous_value_keys)
+ path_keys.update(previous_path_keys)
+ except KeyError:
+ self.logger.debug("Failed to acquire keys")
+ return True

- self.__task.set_runtime(None)
- if self.check_values_changed(previous_node, value_keys.union(path_keys)):
- self.logger.debug("Key values changed")
- return True
+ if self.check_values_changed(previous_node, value_keys.union(path_keys)):
+ self.logger.debug("Key values changed")
+ return True

- if self.check_files_changed(previous_node, previous_node_time, path_keys):
- self.logger.debug("Files changed")
- return True
+ if self.check_files_changed(previous_node, previous_node_time, path_keys):
+ self.logger.debug("Files changed")
+ return True

  return False

  def setup_input_directory(self):
- in_files = set(self.__task.get('task', self.__task.task(), 'input',
- step=self.__step, index=self.__index))
+ in_files = set(self.__task.get('input'))

  for in_step, in_index in self.__record.get('inputnode',
  step=self.__step, index=self.__index):
  if NodeStatus.is_error(self.__record.get('status', step=in_step, index=in_index)):
- self.halt(f'Halting step due to previous error in {in_step}{in_index}')
+ self.halt(f'Halting step due to previous error in {in_step}/{in_index}')

  output_dir = os.path.join(
  self.__chip.getworkdir(step=in_step, index=in_index), "outputs")
  if not os.path.isdir(output_dir):
- self.halt(f'Unable to locate outputs directory for {in_step}{in_index}: '
+ self.halt(f'Unable to locate outputs directory for {in_step}/{in_index}: '
  f'{output_dir}')

  for outfile in os.scandir(output_dir):
- if outfile.name == f'{self.__design}.pkg.json':
+ if outfile.name == f'{self.__name}.pkg.json':
  # Dont forward manifest
  continue

- new_name = input_file_node_name(outfile.name, in_step, in_index)
+ new_name = self.__task.compute_input_file_node_name(outfile.name, in_step, in_index)
  if self.__enforce_inputfiles:
  if outfile.name not in in_files and new_name not in in_files:
  continue
@@ -451,8 +475,7 @@
  '''
  error = False

- required_inputs = self.__task.get('task', self.__task.task(), 'input',
- step=self.__step, index=self.__index)
+ required_inputs = self.__task.get('input')

  input_dir = os.path.join(self.__workdir, 'inputs')

@@ -460,11 +483,10 @@
  path = os.path.join(input_dir, filename)
  if not os.path.exists(path):
  self.logger.error(f'Required input {filename} not received for '
- f'{self.__step}{self.__index}.')
+ f'{self.__step}/{self.__index}.')
  error = True

- all_required = self.__task.get('task', self.__task.task(), 'require',
- step=self.__step, index=self.__index)
+ all_required = self.__task.get('require')
  for item in all_required:
  keypath = item.split(',')
  if not self.__chip.valid(*keypath):
@@ -525,15 +547,8 @@
  to the filesystem to communicate updates between processes.
  '''

- # Setup chip
- self.__chip._init_codecs()
- self.__chip._init_logger(self.__step, self.__index, in_run=True)
-
- if self.__queue:
- self.logger.removeHandler(self.logger._console)
- self.logger._console = QueueHandler(self.__queue)
- self.logger.addHandler(self.logger._console)
- self.__chip._init_logger_formats()
+ # Setup logger
+ self._init_run_logger()

  self.__chip.set('arg', 'step', self.__step)
  self.__chip.set('arg', 'index', self.__index)
@@ -543,7 +558,7 @@
  journal.start()

  # Must be after journaling to ensure journal is complete
- self.init_state(assign_runtime=True)
+ self.__setup_schema_access()

  # Make record of sc version and machine
  self.__record.record_version(self.__step, self.__index)
@@ -555,40 +570,44 @@
  # Start wall timer
  self.__record.record_time(self.__step, self.__index, RecordTime.START)

- # Setup run directory
- self.__task.setup_work_directory(self.__workdir, remove_exist=not self.__replay)
-
  cwd = os.getcwd()
- os.chdir(self.__workdir)
+ with self.runtime():
+ # Setup run directory
+ self.__task.setup_work_directory(self.__workdir, remove_exist=not self.__replay)

- # Attach siliconcompiler file log handler
- self.__chip._add_file_logger(self.__logs["sc"])
+ os.chdir(self.__workdir)

- # Select the inputs to this node
- sel_inputs = self.__task.select_input_nodes()
- if not self.__is_entry_node and not sel_inputs:
- self.halt(f'No inputs selected for {self.__step}{self.__index}')
- self.__record.set("inputnode", sel_inputs, step=self.__step, index=self.__index)
+ # Attach siliconcompiler file log handler
+ file_log = logging.FileHandler(self.__logs["sc"])
+ file_log.setFormatter(
+ SCInRunLoggerFormatter(self.__chip, self.__job, self.__step, self.__index))
+ self.logger.addHandler(file_log)

- if self.__hash:
- self.__hash_files_pre_execute()
+ # Select the inputs to this node
+ sel_inputs = self.__task.select_input_nodes()
+ if not self.__is_entry_node and not sel_inputs:
+ self.halt(f'No inputs selected for {self.__step}/{self.__index}')
+ self.__record.set("inputnode", sel_inputs, step=self.__step, index=self.__index)

- # Forward data
- if not self.__replay:
- self.setup_input_directory()
+ if self.__hash:
+ self.__hash_files_pre_execute()

- # Write manifest prior to step running into inputs
- self.__chip.write_manifest(self.__manifests["input"])
+ # Forward data
+ if not self.__replay:
+ self.setup_input_directory()

- # Check manifest
- if not self.validate():
- self.halt("Failed to validate node setup. See previous errors")
+ # Write manifest prior to step running into inputs
+ self.__chip.write_manifest(self.__manifests["input"])

- try:
- self.execute()
- except Exception as e:
- utils.print_traceback(self.logger, e)
- self.halt()
+ # Check manifest
+ if not self.validate():
+ self.halt("Failed to validate node setup. See previous errors")
+
+ try:
+ self.execute()
+ except Exception as e:
+ utils.print_traceback(self.logger, e)
+ self.halt()

  # return to original directory
  os.chdir(cwd)
@@ -597,7 +616,7 @@
  journal.stop()

  if self.__pipe:
- self.__pipe.send(self.__chip.get("package", field="schema").get_path_cache())
+ self.__pipe.send(Resolver.get_cache(self.__chip))

  def execute(self):
  self.logger.info(f'Running in {self.__workdir}')
@@ -614,12 +633,17 @@
  # copy inputs to outputs and skip execution
  for in_step, in_index in self.__record.get('inputnode',
  step=self.__step, index=self.__index):
+ required_outputs = set(self.__task.get('output'))
  in_workdir = self.__chip.getworkdir(step=in_step, index=in_index)
  for outfile in os.scandir(f"{in_workdir}/outputs"):
- if outfile.name == f'{self.__design}.pkg.json':
+ if outfile.name == f'{self.__name}.pkg.json':
  # Dont forward manifest
  continue

+ if outfile.name not in required_outputs:
+ # Dont forward non-required outputs
+ continue
+
  if outfile.is_file() or outfile.is_symlink():
  utils.link_symlink_copy(outfile.path,
  f'outputs/{outfile.name}')
@@ -716,7 +740,7 @@
  if errors and not self.__chip.get('option', 'continue',
  step=self.__step, index=self.__index):
  self.halt(f'{self.__task.tool()}/{self.__task.task()} reported {errors} '
- f'errors during {self.__step}{self.__index}')
+ f'errors during {self.__step}/{self.__index}')

  if self.__error:
  self.halt()
@@ -747,9 +771,8 @@

  checks = {}
  matches = {}
- for suffix in self.__task.getkeys('task', self.__task.task(), 'regex'):
- regexes = self.__task.get('task', self.__task.task(), 'regex', suffix,
- step=self.__step, index=self.__index)
+ for suffix in self.__task.getkeys('regex'):
+ regexes = self.__task.get('regex', suffix)
  if not regexes:
  continue

@@ -814,17 +837,16 @@

  for metric in ("errors", "warnings"):
  if metric in matches:
- errors = self.__metrics.get(metric, step=self.__step, index=self.__index)
- if errors is None:
- errors = 0
- errors += matches[metric]
+ value = self.__metrics.get(metric, step=self.__step, index=self.__index)
+ if value is None:
+ value = 0
+ value += matches[metric]

  sources = [os.path.basename(self.__logs["exe"])]
- if self.__task.get('task', self.__task.task(), 'regex', metric,
- step=self.__step, index=self.__index):
+ if self.__task.get('regex', metric):
  sources.append(f'{self.__step}.{metric}')

- record_metric(self.__chip, self.__step, self.__index, metric, errors, sources)
+ self.__task.record_metric(metric, value, source_file=sources)

  def __hash_files_pre_execute(self):
  for task_key in ('refdir', 'prescript', 'postscript', 'script'):
@@ -833,8 +855,7 @@
  allow_cache=True, verbose=False)

  # hash all requirements
- for item in set(self.__task.get('task', self.__task.task(), 'require',
- step=self.__step, index=self.__index)):
+ for item in set(self.__task.get('require')):
  args = item.split(',')
  sc_type = self.__chip.get(*args, field='type')
  if 'file' in sc_type or 'dir' in sc_type:
@@ -850,8 +871,7 @@
  step=self.__step, index=self.__index, check=False, verbose=False)

  # hash all requirements
- for item in set(self.__task.get('task', self.__task.task(), 'require',
- step=self.__step, index=self.__index)):
+ for item in set(self.__task.get('require')):
  args = item.split(',')
  sc_type = self.__chip.get(*args, field='type')
  if 'file' in sc_type or 'dir' in sc_type:
@@ -883,8 +903,7 @@

  outputs = set(outputs)

- output_files = set(self.__task.get('task', self.__task.task(), 'output',
- step=self.__step, index=self.__index))
+ output_files = set(self.__task.get('output'))

  missing = output_files.difference(outputs)
  excess = outputs.difference(output_files)
@@ -906,7 +925,7 @@
  if not os.path.exists(copy_from):
  return

- self.logger.info(f'Importing {self.__step}{self.__index} from {source}')
+ self.logger.info(f'Importing {self.__step}/{self.__index} from {source}')
  shutil.copytree(
  copy_from, self.__workdir,
  dirs_exist_ok=True,
@@ -917,16 +936,15 @@
  # delete file as it might be a hard link
  os.remove(self.__replay_script)

- self.__task.set_runtime(self.__chip, step=self.__step, index=self.__index)
- self.__task.generate_replay_script(self.__replay_script, self.__workdir)
- self.__task.set_runtime(None)
+ with self.runtime():
+ self.__task.generate_replay_script(self.__replay_script, self.__workdir)

  for manifest in self.__manifests.values():
  if os.path.exists(manifest):
  schema = Schema.from_manifest(manifest)
  # delete file as it might be a hard link
  os.remove(manifest)
- schema.set('option', 'jobname', self.__chip.get('option', 'jobname'))
+ schema.set('option', 'jobname', self.__job)
  schema.write_manifest(manifest)

  def clean_directory(self):
siliconcompiler/scheduler/send_messages.py
@@ -66,7 +66,7 @@ def send(chip, msg_type, step, index):
  msg = MIMEMultipart()

  if step and index:
- subject = f'SiliconCompiler : {chip.design} | {jobname} | {step}{index} | {msg_type}'
+ subject = f'SiliconCompiler : {chip.design} | {jobname} | {step} | {index} | {msg_type}'
  else:
  subject = f'SiliconCompiler : {chip.design} | {jobname} | {msg_type}'

@@ -92,7 +92,7 @@ def send(chip, msg_type, step, index):
  msg.attach(img_attach)

  runtime = RuntimeFlowgraph(
- chip.schema.get("flowgraph", flow, field='schema'),
+ chip.get("flowgraph", flow, field='schema'),
  from_steps=chip.get('option', 'from'),
  to_steps=chip.get('option', 'to'),
  prune_nodes=chip.get('option', 'prune'))
@@ -110,7 +110,7 @@ def send(chip, msg_type, step, index):
  metric_keys=metrics_to_show)
  else:
  # Attach logs
- for log in (f'sc_{step}{index}.log', f'{step}.log'):
+ for log in (f'sc_{step}_{index}.log', f'{step}.log'):
  log_file = f'{chip.getworkdir(step=step, index=index)}/{log}'
  if os.path.exists(log_file):
  with sc_open(log_file) as f:
siliconcompiler/scheduler/slurm.py
@@ -36,10 +36,10 @@ class SlurmSchedulerNode(SchedulerNode):

  collect = False
  flow = chip.get('option', 'flow')
- entry_nodes = chip.schema.get("flowgraph", flow, field="schema").get_entry_nodes()
+ entry_nodes = chip.get("flowgraph", flow, field="schema").get_entry_nodes()

  runtime = RuntimeFlowgraph(
- chip.schema.get("flowgraph", flow, field='schema'),
+ chip.get("flowgraph", flow, field='schema'),
  from_steps=chip.get('option', 'from'),
  to_steps=chip.get('option', 'to'),
  prune_nodes=chip.get('option', 'prune'))
@@ -65,7 +65,7 @@ class SlurmSchedulerNode(SchedulerNode):

  @staticmethod
  def get_job_name(jobhash, step, index):
- return f'{jobhash}_{step}{index}'
+ return f'{jobhash}_{step}_{index}'

  @staticmethod
  def get_runtime_file_name(jobhash, step, index, ext):
@@ -93,6 +93,8 @@ class SlurmSchedulerNode(SchedulerNode):
  finishes processing this step, and it sets the active/error bits.
  '''

+ self._init_run_logger()
+
  if shutil.which('sinfo') is None:
  raise RuntimeError('slurm is not available or installed on this machine')